Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions packages/inference/src/lib/getProviderHelper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import * as Fireworks from "../providers/fireworks-ai.js";
import * as Groq from "../providers/groq.js";
import * as HFInference from "../providers/hf-inference.js";
import * as Hyperbolic from "../providers/hyperbolic.js";
import * as Latitude from "../providers/latitude.js";
import * as Nebius from "../providers/nebius.js";
import * as Novita from "../providers/novita.js";
import * as Nscale from "../providers/nscale.js";
Expand Down Expand Up @@ -132,6 +133,10 @@ export const PROVIDERS: Record<InferenceProvider, Partial<Record<InferenceTask,
conversational: new Hyperbolic.HyperbolicConversationalTask(),
"text-generation": new Hyperbolic.HyperbolicTextGenerationTask(),
},
"latitude-sh": {
conversational: new Latitude.LatitudeConversationalTask(),
"text-generation": new Latitude.LatitudeTextGenerationTask(),
},
nebius: {
"text-to-image": new Nebius.NebiusTextToImageTask(),
conversational: new Nebius.NebiusConversationalTask(),
Expand Down
1 change: 1 addition & 0 deletions packages/inference/src/providers/consts.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ export const HARDCODED_MODEL_INFERENCE_MAPPING: Record<
groq: {},
"hf-inference": {},
hyperbolic: {},
"latitude-sh": {},
nebius: {},
novita: {},
nscale: {},
Expand Down
30 changes: 30 additions & 0 deletions packages/inference/src/providers/latitude.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
/**
* See the registered mapping of HF model ID => Latitude model ID here:
*
* https://huggingface.co/api/partners/latitude-sh/models
*
* This is a publicly available mapping.
*
* If you want to try to run inference for a new model locally before it's registered on huggingface.co,
 * you can add it to the dictionary "HARDCODED_MODEL_INFERENCE_MAPPING" in consts.ts, for dev purposes.
*
* - If you work at Latitude and want to update this mapping, please use the model mapping API we provide on huggingface.co
* - If you're a community member and want to add a new supported HF model to Latitude, please open an issue on the present repo
* and we will tag Latitude team members.
*
* Thanks!
*/

import { BaseConversationalTask, BaseTextGenerationTask } from "./providerHelper.js";

/** Base URL for the Latitude inference API (shared by all Latitude tasks). */
const LATITUDE_API_BASE_URL = "https://api.lsh.ai";

/**
 * Conversational (chat-completion) task routed through the "latitude-sh" provider.
 *
 * Inherits all request/response handling from BaseConversationalTask; only the
 * provider id and API base URL are specific to Latitude.
 */
export class LatitudeConversationalTask extends BaseConversationalTask {
	constructor() {
		super("latitude-sh", LATITUDE_API_BASE_URL);
	}
}

/**
 * Text-generation task routed through the "latitude-sh" provider.
 *
 * Inherits all request/response handling from BaseTextGenerationTask; only the
 * provider id and API base URL are specific to Latitude.
 */
export class LatitudeTextGenerationTask extends BaseTextGenerationTask {
	constructor() {
		super("latitude-sh", LATITUDE_API_BASE_URL);
	}
}
2 changes: 2 additions & 0 deletions packages/inference/src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ export const INFERENCE_PROVIDERS = [
"groq",
"hf-inference",
"hyperbolic",
"latitude-sh",
"nebius",
"novita",
"nscale",
Expand Down Expand Up @@ -93,6 +94,7 @@ export const PROVIDERS_HUB_ORGS: Record<InferenceProvider, string> = {
groq: "groq",
"hf-inference": "hf-inference",
hyperbolic: "Hyperbolic",
"latitude-sh": "latitude-sh",
nebius: "nebius",
novita: "novita",
nscale: "nscale",
Expand Down