Initial commit: Ollama MCP server
MCP server exposing local Ollama models to Claude Code via a LiteLLM proxy.

Tools: query_local_model, review_code, summarize, generate_boilerplate, list_models.

Deployed to the k8s ai-inference namespace via ArgoCD.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
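The server code itself is not part of this commit, but a minimal sketch of how two of the listed tools could be wired up follows, assuming the official MCP Python SDK (FastMCP) and a LiteLLM proxy reachable inside the cluster. The LITELLM_URL default, the model name, and the absence of an API key are illustrative assumptions, not details confirmed by this commit.

    import os

    import httpx
    from mcp.server.fastmcp import FastMCP

    # Hypothetical in-cluster address of the LiteLLM proxy (assumption,
    # not taken from this commit).
    LITELLM_URL = os.environ.get(
        "LITELLM_URL", "http://litellm.ai-inference.svc.cluster.local:4000"
    )

    mcp = FastMCP("ollama-mcp")

    @mcp.tool()
    def list_models() -> list[str]:
        """List model IDs served through the LiteLLM proxy."""
        resp = httpx.get(f"{LITELLM_URL}/v1/models")
        resp.raise_for_status()
        return [m["id"] for m in resp.json()["data"]]

    @mcp.tool()
    def query_local_model(prompt: str, model: str = "llama3") -> str:
        """Send a prompt to a local Ollama model via the proxy's
        OpenAI-compatible chat completions endpoint."""
        resp = httpx.post(
            f"{LITELLM_URL}/v1/chat/completions",
            json={"model": model, "messages": [{"role": "user", "content": prompt}]},
            timeout=120.0,
        )
        resp.raise_for_status()
        return resp.json()["choices"][0]["message"]["content"]

    if __name__ == "__main__":
        mcp.run()  # stdio transport by default, which is what Claude Code expects

Claude Code would then register the server with something along the lines of: claude mcp add ollama -- python server.py (invocation hypothetical; the actual entrypoint is not visible in this commit).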
k8s/argocd-app.yaml (new file, 43 lines)
@@ -0,0 +1,43 @@
---
# ArgoCD Application - deploy this once to bootstrap:
#   kubectl apply -f k8s/argocd-app.yaml
#
# Pre-requisite: Add the repo to ArgoCD first:
#   argocd repo add https://repo.adservio.us/ai_approver/ollama-mcp.git \
#     --username <gitea-user> --password <gitea-token>
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
  name: ollama-mcp
  namespace: argocd
  finalizers:
    - resources-finalizer.argocd.argoproj.io
spec:
  project: default
  source:
    repoURL: https://repo.adservio.us/ai_approver/ollama-mcp.git
    targetRevision: main
    path: k8s
    directory:
      exclude: argocd-app.yaml
  destination:
    server: https://kubernetes.default.svc
    namespace: ai-inference
  syncPolicy:
    automated:
      prune: true
      selfHeal: true
      allowEmpty: false
    syncOptions:
      - CreateNamespace=true
    retry:
      limit: 5
      backoff:
        duration: 5s
        factor: 2
        maxDuration: 3m
  ignoreDifferences:
    - group: apps
      kind: Deployment
      jsonPointers:
        - /spec/replicas
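Two details of this manifest are worth noting: directory.exclude keeps the Application from managing its own bootstrap manifest, and ignoreDifferences on /spec/replicas stops selfHeal from reverting replica counts changed outside Git (for example by kubectl scale or a HorizontalPodAutoscaler).

The LiteLLM proxy that fronts the Ollama models is not part of this commit, but its config would look roughly like the sketch below; the service hostname, port, and model names are assumptions for illustration, not values taken from the repo.

    # Hypothetical LiteLLM proxy config (not in this commit).
    model_list:
      - model_name: llama3
        litellm_params:
          model: ollama/llama3
          # Assumed in-cluster address of the Ollama service.
          api_base: http://ollama.ai-inference.svc.cluster.local:11434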