Commit
Merge pull request #72 from bjwswang/llms
fix: reconcile logic bugs in prompt and llm
bjwswang authored Aug 30, 2023
2 parents 065071a + e4331bd commit 8e48dde
Showing 12 changed files with 129 additions and 275 deletions.
2 changes: 0 additions & 2 deletions api/v1alpha1/condition.go
@@ -34,8 +34,6 @@ const (
 	TypeUnknown ConditionType = "Unknown"
 	// TypeDone resources are believed to be processed
 	TypeDone ConditionType = "Done"
-	// TypeUnavailable resources are unavailable
-	TypeUnavailable ConditionType = "Unavailable"
 )
 
 // A ConditionReason represents the reason a resource is in a condition.
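With TypeUnavailable removed, a failing resource is reported through the existing Ready condition (Status False, Reason Unavailable), as the controller change below shows, so callers only inspect a single condition type. A minimal sketch of how a consumer might read that state, assuming the ConditionedStatus and Condition fields visible in this diff; IsLLMReady is a hypothetical helper written for illustration, not part of this PR:

func IsLLMReady(llm *arcadiav1alpha1.LLM) bool {
	// Scan the recorded conditions and report whether Ready is currently True.
	for _, cond := range llm.Status.ConditionedStatus.Conditions {
		if cond.Type == arcadiav1alpha1.TypeReady {
			return cond.Status == corev1.ConditionTrue
		}
	}
	// No Ready condition recorded yet: treat the LLM as not ready.
	return false
}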
50 changes: 0 additions & 50 deletions config/arcadia.kubeagi.k8s.com.cn_laboratories.yaml

This file was deleted.

100 changes: 0 additions & 100 deletions config/arcadia.kubeagi.k8s.com.cn_llms.yaml

This file was deleted.

84 changes: 0 additions & 84 deletions config/arcadia.kubeagi.k8s.com.cn_prompts.yaml

This file was deleted.

47 changes: 32 additions & 15 deletions controllers/llm_controller.go
@@ -18,10 +18,11 @@ package controllers
 
 import (
 	"context"
-	"fmt"
+	"reflect"
 
 	"github.com/go-logr/logr"
 	"github.com/kubeagi/arcadia/pkg/llms"
+	"github.com/kubeagi/arcadia/pkg/llms/openai"
 	"github.com/kubeagi/arcadia/pkg/llms/zhipuai"
 	corev1 "k8s.io/api/core/v1"
 	"k8s.io/apimachinery/pkg/api/errors"
@@ -30,6 +31,7 @@ import (
 	ctrl "sigs.k8s.io/controller-runtime"
 	"sigs.k8s.io/controller-runtime/pkg/builder"
 	"sigs.k8s.io/controller-runtime/pkg/client"
+	"sigs.k8s.io/controller-runtime/pkg/event"
 	"sigs.k8s.io/controller-runtime/pkg/log"
 	"sigs.k8s.io/controller-runtime/pkg/predicate"
 	"sigs.k8s.io/controller-runtime/pkg/reconcile"
@@ -73,10 +75,11 @@ func (r *LLMReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.R
 
 	err = r.CheckLLM(ctx, logger, instance)
 	if err != nil {
-		logger.Error(err, "Failed to check LLM")
+		// Update conditioned status
 		return ctrl.Result{}, err
 	}
 
+	logger.Info("Instance is updated and synchronized")
 	return ctrl.Result{}, nil
 }
 
@@ -92,37 +95,40 @@ func (r *LLMReconciler) CheckLLM(ctx context.Context, logger logr.Logger, instan
 	logger.Info("Checking LLM instance")
 	// Check new URL/Auth availability
 	var err error
-	var response llms.Response
 
 	apiKey, err := instance.AuthAPIKey(ctx, r.Client)
 	if err != nil {
-		return err
+		return r.UpdateStatus(ctx, instance, nil, err)
 	}
 
+	var llmClient llms.LLM
 	switch instance.Spec.Type {
 	case llms.OpenAI:
-		// validator := openai.NewOpenAI(apiKey)
-		// response, err = validator.Validate()
-		return fmt.Errorf("openAI not implemented yet")
+		llmClient = openai.NewOpenAI(apiKey)
 	case llms.ZhiPuAI:
-		validator := zhipuai.NewZhiPuAI(apiKey)
-		response, err = validator.Validate()
+		llmClient = zhipuai.NewZhiPuAI(apiKey)
 	default:
-		return fmt.Errorf("unknown LLM type: %s", instance.Spec.Type)
+		llmClient = llms.NewUnknowLLM()
 	}
 
+	response, err := llmClient.Validate()
+	return r.UpdateStatus(ctx, instance, response, err)
+}
+
+func (r *LLMReconciler) UpdateStatus(ctx context.Context, instance *arcadiav1alpha1.LLM, response llms.Response, err error) error {
+	instanceCopy := instance.DeepCopy()
 	if err != nil {
 		// Set status to unavailable
-		instance.Status.SetConditions(arcadiav1alpha1.Condition{
-			Type:               arcadiav1alpha1.TypeUnavailable,
+		instanceCopy.Status.SetConditions(arcadiav1alpha1.Condition{
+			Type:               arcadiav1alpha1.TypeReady,
 			Status:             corev1.ConditionFalse,
 			Reason:             arcadiav1alpha1.ReasonUnavailable,
 			Message:            err.Error(),
 			LastTransitionTime: metav1.Now(),
 		})
 	} else {
 		// Set status to available
-		instance.Status.SetConditions(arcadiav1alpha1.Condition{
+		instanceCopy.Status.SetConditions(arcadiav1alpha1.Condition{
 			Type:               arcadiav1alpha1.TypeReady,
 			Status:             corev1.ConditionTrue,
 			Reason:             arcadiav1alpha1.ReasonAvailable,
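The rewritten CheckLLM above is now just backend selection: each provider is constructed behind the llms.LLM interface, and a single Validate call feeds UpdateStatus, which writes conditions onto a DeepCopy of the object. A rough sketch of the contract this hunk implies; the real definitions in pkg/llms are not shown in this diff and may differ, so treat both types as placeholders:

// Response stands in for whatever a backend returns from its availability
// probe; its concrete shape lives in pkg/llms and is not shown here.
type Response interface{}

// LLM is the behaviour CheckLLM relies on after this change: every backend,
// including the NewUnknowLLM fallback, can be validated the same way.
type LLM interface {
	Validate() (Response, error)
}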
@@ -131,10 +137,21 @@ func (r *LLMReconciler) CheckLLM(ctx context.Context, logger logr.Logger, instan
 			LastSuccessfulTime: metav1.Now(),
 		})
 	}
-
-	return r.Client.Status().Update(ctx, instance)
+	return r.Client.Status().Update(ctx, instanceCopy)
 }
 
 type LLMPredicates struct {
 	predicate.Funcs
 }
+
+func (llm LLMPredicates) Create(ce event.CreateEvent) bool {
+	prompt := ce.Object.(*arcadiav1alpha1.LLM)
+	return len(prompt.Status.ConditionedStatus.Conditions) == 0
+}
+
+func (llm LLMPredicates) Update(ue event.UpdateEvent) bool {
+	oldLLM := ue.ObjectOld.(*arcadiav1alpha1.LLM)
+	newLLM := ue.ObjectNew.(*arcadiav1alpha1.LLM)
+
+	return !reflect.DeepEqual(oldLLM.Spec, newLLM.Spec)
+}
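Together, the two predicate methods keep reconciliation quiet unless there is real work: Create only passes objects with no conditions recorded yet, and Update only passes spec changes, so the controller does not re-enter Reconcile for its own status writes. A sketch of how such predicates are typically attached in controller-runtime; this controller's actual SetupWithManager is not part of the excerpt above, so the wiring below is an assumption:

func (r *LLMReconciler) SetupWithManager(mgr ctrl.Manager) error {
	// Watch LLM objects and filter events through LLMPredicates, so only new
	// or spec-changed instances reach Reconcile.
	return ctrl.NewControllerManagedBy(mgr).
		For(&arcadiav1alpha1.LLM{}, builder.WithPredicates(LLMPredicates{})).
		Complete(r)
}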