@@ -2,7 +2,12 @@ import { Hono } from "@hono/hono";
 import { bearerAuth } from "@hono/hono/bearer-auth";
 import { assert, is } from "@core/unknownutil";
 import { parseURL } from "ufo";
-import { chooseEndpoint, convertToCustomEndpoint, isCursorRequest, isOpenAIModel } from "./util.ts";
+import {
+  chooseEndpoint,
+  convertToCustomEndpoint,
+  isCursorRequest,
+  isOpenAIModel,
+} from "./util.ts";
 
 /**
  * A class representing a proxy application that handles requests to OpenAI and Ollama endpoints.
@@ -21,7 +26,7 @@ class ProxyApp {
   constructor(
     openAIEndpoint: string,
     ollamaEndpoint: string,
-    OPENAI_API_KEY: string | undefined
+    OPENAI_API_KEY: string | undefined,
   ) {
     this.openAIEndpoint = openAIEndpoint;
     this.ollamaEndpoint = ollamaEndpoint;
@@ -34,26 +39,37 @@ class ProxyApp {
    * @param next - A function to execute the next middleware in the chain.
    * @returns A Promise that resolves to the response object.
    */
-  private handleAllRequest(c: any, next: () => Promise<any>): Response | Promise<void | Response> {
+  private handleAllRequest(
+    c: any,
+    next: () => Promise<any>,
+  ): Response | Promise<void | Response> {
     if (c.req.method === "OPTIONS") {
       return this.handleOptionsRequest(c);
     }
     if (!this.OPENAI_API_KEY) {
-      return new Response('Unauthorized - API key is required', { status: 401 });
+      return new Response("Unauthorized - API key is required", {
+        status: 401,
+      });
     }
-
-    return bearerAuth({ token: this.OPENAI_API_KEY.toString() })(c, async () => {
-      // Execute subsequent middleware
-      await next();
-      // Add CORS headers to the response
-      if (c.res) {
-        c.res = new Response(c.res.body, {
-          status: c.res.status,
-          headers: this.setCORSHeaders(c.res, c.req.raw.headers.get('origin'))
-        });
-      }
-      return c.res;
-    });
+
+    return bearerAuth({ token: this.OPENAI_API_KEY.toString() })(
+      c,
+      async () => {
+        // Execute subsequent middleware
+        await next();
+        // Add CORS headers to the response
+        if (c.res) {
+          c.res = new Response(c.res.body, {
+            status: c.res.status,
+            headers: this.setCORSHeaders(
+              c.res,
+              c.req.raw.headers.get("origin"),
+            ),
+          });
+        }
+        return c.res;
+      },
+    );
   }
 
   /**
@@ -64,43 +80,46 @@ class ProxyApp {
    */
   private setCORSHeaders(res: Response, origin: string): Headers {
     const headers = new Headers(res.headers);
-    headers.set('Access-Control-Allow-Origin', origin || '*');
-    headers.set('Access-Control-Allow-Methods', 'POST, GET, OPTIONS');
-    headers.set('Access-Control-Allow-Headers', 'Content-Type, Authorization');
-    headers.set('Access-Control-Allow-Credentials', 'true');
+    headers.set("Access-Control-Allow-Origin", origin || "*");
+    headers.set("Access-Control-Allow-Methods", "POST, GET, OPTIONS");
+    headers.set("Access-Control-Allow-Headers", "Content-Type, Authorization");
+    headers.set("Access-Control-Allow-Credentials", "true");
     return headers;
   }
   /**
-   * Handles model verification requests
-   * @param c - The context object
-   * @param json - The request body
-   * @returns Response or null if verification succeeds
-   */
+   * Handles model verification requests
+   * @param c - The context object
+   * @param json - The request body
+   * @returns Response or null if verification succeeds
+   */
   private async handleOpenAIModelVerify(c: any, json: any) {
     if (this.isVerifyRequest(c, json)) {
-      const models = await this.getModelsList()
-      const modelName = models.find((model: any) => model.name === json.model)?.name || models[0]?.name;
+      const models = await this.getModelsList();
+      const modelName = models.find((model: any) =>
+        model.name === json.model
+      )?.name || models[0]?.name;
       if (modelName) {
         json.model = modelName;
         return null;
-      } else {
+      } else {
         console.error(`Ollama Model not found: ${json.model}`);
         return new Response(JSON.stringify({ error: "Model not found" }), {
           status: 404,
-          headers: { 'Content-Type': 'application/json' }
+          headers: { "Content-Type": "application/json" },
         });
       }
     }
   }
   /**
    * Verify whether the request comes from Cursor
-   * @param c
-   * @param json
-   * @returns
+   * @param c
+   * @param json
+   * @returns
    */
   private isVerifyRequest(c: any, json: any) {
-    const origin = c.req.raw.headers.get('Origin');
-    return isOpenAIModel(json.model) && isCursorRequest(origin) && json.stream === false;
+    const origin = c.req.raw.headers.get("Origin");
+    return isOpenAIModel(json.model) && isCursorRequest(origin) &&
+      json.stream === false;
   }
   /**
    * Handles POST requests.
@@ -125,15 +144,15 @@ class ProxyApp {
     const url = convertToCustomEndpoint(c.req.url, parseURL(endpoint));
     const reqHeaders = this.setCORSHeaders(c.req.raw, origin);
     // Set the Origin header, otherwise ollama will return an error
-    reqHeaders.set('Origin', origin);
+    reqHeaders.set("Origin", origin);
     const req = new Request(url, {
       ...c.req.raw,
       method: "POST",
       body: JSON.stringify(json),
       headers: reqHeaders,
-      mode: 'no-cors'
+      mode: "no-cors",
     });
-    return fetch(req)
+    return fetch(req);
   }
   /**
    * Handles GET requests.
@@ -142,34 +161,43 @@ class ProxyApp {
    */
   private async handleGetRequest(c: any): Promise<Response> {
     const path = new URL(c.req.url).pathname;
-    if (path === '/v1/models') {
+    if (path === "/v1/models") {
       try {
         const models = await this.getModelsList();
-        const headers = this.setCORSHeaders(c.req.raw, c.req.raw.headers.get('origin'));
+        const headers = this.setCORSHeaders(
+          c.req.raw,
+          c.req.raw.headers.get("origin"),
+        );
 
         const formattedResponse = {
           object: "list",
           data: models.map((model: any) => ({
             id: model.name,
             object: "model",
             created: Date.now(),
-            owned_by: "ollama"
-          }))
+            owned_by: "ollama",
+          })),
         };
 
         return new Response(JSON.stringify(formattedResponse), {
           status: 200,
-          headers: headers
+          headers: headers,
         });
       } catch (error) {
-        return new Response(JSON.stringify({ error: "Failed to fetch models" }), {
-          status: 500,
-          headers: { 'Content-Type': 'application/json' }
-        });
+        return new Response(
+          JSON.stringify({ error: "Failed to fetch models" }),
+          {
+            status: 500,
+            headers: { "Content-Type": "application/json" },
+          },
+        );
       }
     }
 
-    const url = convertToCustomEndpoint(c.req.url, parseURL(this.ollamaEndpoint));
+    const url = convertToCustomEndpoint(
+      c.req.url,
+      parseURL(this.ollamaEndpoint),
+    );
     const req = new Request(url, c.req.raw);
     req.headers.set("Host", this.ollamaEndpoint);
     return fetch(req);
@@ -183,7 +211,7 @@ class ProxyApp {
   private handleOptionsRequest(c: any): Response {
     return new Response(null, {
       status: 204,
-      headers: this.setCORSHeaders(c.req.raw, c.req.raw.headers.get('origin'))
+      headers: this.setCORSHeaders(c.req.raw, c.req.raw.headers.get("origin")),
     });
   }
 
@@ -194,10 +222,10 @@ class ProxyApp {
   private async getModelsList(): Promise<any[]> {
     const url = `${this.ollamaEndpoint}/api/tags`;
     const req = new Request(url, {
-      method: 'GET',
+      method: "GET",
       headers: new Headers({
-        'Accept': 'application/json',
-      })
+        "Accept": "application/json",
+      }),
     });
 
     try {
@@ -216,16 +244,16 @@ class ProxyApp {
    */
   public createApp(): Hono {
     const app = new Hono();
-    app.use('*', async (c: any, next: () => Promise<any>) => {
+    app.use("*", async (c: any, next: () => Promise<any>) => {
       // For OPTIONS requests, directly return
       return this.handleAllRequest(c, next);
     });
 
-    app.post('*', (c: any) => {
+    app.post("*", (c: any) => {
       return this.handlePostRequest(c);
     });
 
-    app.get('*', (c: any) => {
+    app.get("*", (c: any) => {
       return this.handleGetRequest(c);
     });
 
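As a point of reference, here is a minimal sketch of how this ProxyApp might be instantiated and served under Deno. The module path, endpoint URLs, and environment variable name are assumptions for illustration, not taken from this commit.

// Hypothetical bootstrap, assuming ProxyApp is exported from this module.
import { ProxyApp } from "./proxy_app.ts"; // assumed module path

const app = new ProxyApp(
  "https://api.openai.com",       // assumed OpenAI endpoint
  "http://localhost:11434",       // assumed local Ollama endpoint
  Deno.env.get("OPENAI_API_KEY"), // assumed env variable for the bearer token
).createApp();

// A Hono app exposes a fetch handler that Deno.serve can consume directly.
Deno.serve(app.fetch);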