{"payload":{"feedbackUrl":"https://github.com/orgs/community/discussions/53140","repo":{"id":813857006,"defaultBranch":"main","name":"AIProxySwift","ownerLogin":"lzell","currentUserCanPush":false,"isFork":false,"isEmpty":false,"createdAt":"2024-06-11T22:03:40.000Z","ownerAvatar":"https://avatars.githubusercontent.com/u/35940?v=4","public":true,"private":false,"isOrgOwned":false},"refInfo":{"name":"","listCacheKey":"v0:1726797971.0","currentOid":""},"activityList":{"items":[{"before":"553c5b14f96dc294b907eb3b67f197d00826240d","after":"8be554a8230b97cd1a04b7cf4c5c03b1a9ce9e09","ref":"refs/heads/main","pushedAt":"2024-09-20T02:05:26.000Z","pushType":"pr_merge","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add token usage stats to OpenAI chat structs (#45)\n\nThe `usage` property is now available on `OpenAIChatCompletionResponseBody` and `OpenAIChatCompletionStreamingChunk`","shortMessageHtmlLink":"Add token usage stats to OpenAI chat structs (#45)"}},{"before":"553c5b14f96dc294b907eb3b67f197d00826240d","after":"b4e3a0a2c4c111e44937de9e7c9aca6b422be407","ref":"refs/heads/lz/add-openai-usage","pushedAt":"2024-09-20T01:50:21.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add token usage stats to OpenAI chat structs","shortMessageHtmlLink":"Add token usage stats to OpenAI chat structs"}},{"before":null,"after":"553c5b14f96dc294b907eb3b67f197d00826240d","ref":"refs/heads/lz/add-openai-usage","pushedAt":"2024-09-20T00:17:06.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add support for Flux-PuLID (#44)","shortMessageHtmlLink":"Add support for Flux-PuLID (#44)"}},{"before":"21bd1dc778c0b22582c091eb15cf126676f7a599","after":"553c5b14f96dc294b907eb3b67f197d00826240d","ref":"refs/heads/main","pushedAt":"2024-09-19T19:05:56.000Z","pushType":"pr_merge","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add support for Flux-PuLID (#44)","shortMessageHtmlLink":"Add support for Flux-PuLID (#44)"}},{"before":"a550c958e5982957682eb44c8af4a02119ba3fa7","after":"20ac9e2328c53177379cbe26152b8bbba12ad273","ref":"refs/heads/lz/add-support-for-flux-pulid","pushedAt":"2024-09-19T19:05:13.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Use UIImage in example","shortMessageHtmlLink":"Use UIImage in example"}},{"before":"cbfd621519ee22eeb8e11f71b4c5bcbd3b01d592","after":"a550c958e5982957682eb44c8af4a02119ba3fa7","ref":"refs/heads/lz/add-support-for-flux-pulid","pushedAt":"2024-09-19T19:01:52.000Z","pushType":"force_push","commitsCount":0,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add support for Flux-PuLID","shortMessageHtmlLink":"Add support for Flux-PuLID"}},{"before":"e1d10af490b80557475d1a909156a743b39c532f","after":"cbfd621519ee22eeb8e11f71b4c5bcbd3b01d592","ref":"refs/heads/lz/add-support-for-flux-pulid","pushedAt":"2024-09-19T18:56:02.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Squash","shortMessageHtmlLink":"Squash"}},{"before":null,"after":"e1d10af490b80557475d1a909156a743b39c532f","ref":"refs/heads/lz/add-support-for-flux-pulid","pushedAt":"2024-09-19T18:13:54.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Main image required","shortMessageHtmlLink":"Main image required"}},{"before":"3f1fa23eb8cac02c9fd280842682f17da043872d","after":"21bd1dc778c0b22582c091eb15cf126676f7a599","ref":"refs/heads/main","pushedAt":"2024-09-19T07:21:47.000Z","pushType":"pr_merge","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add OpenAI assistant role to chat completions API (#42)","shortMessageHtmlLink":"Add OpenAI assistant role to chat completions API (#42)"}},{"before":"3f1fa23eb8cac02c9fd280842682f17da043872d","after":"e36bbd0485da8c4dfa2f16d259cfe7b95d21a008","ref":"refs/heads/lz/add-assistant-role","pushedAt":"2024-09-19T07:18:58.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add OpenAI assistant role to chat completions API","shortMessageHtmlLink":"Add OpenAI assistant role to chat completions API"}},{"before":null,"after":"3f1fa23eb8cac02c9fd280842682f17da043872d","ref":"refs/heads/lz/add-assistant-role","pushedAt":"2024-09-19T06:26:04.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add support for Azure deployments (#41)","shortMessageHtmlLink":"Add support for Azure deployments (#41)"}},{"before":"81707181196428797048b972e4fffa1d7f7df70c","after":"3f1fa23eb8cac02c9fd280842682f17da043872d","ref":"refs/heads/main","pushedAt":"2024-09-19T05:18:19.000Z","pushType":"pr_merge","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add support for Azure deployments (#41)","shortMessageHtmlLink":"Add support for Azure deployments (#41)"}},{"before":"33976243c7af549a30342534098bfe0d252268e4","after":"21ba1c659122fca5d537d307d58ed362fb42ae4f","ref":"refs/heads/lz/azure-experiment","pushedAt":"2024-09-19T05:16:25.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add RequestFormat.azureDeployment as an initialization option","shortMessageHtmlLink":"Add RequestFormat.azureDeployment as an initialization option"}},{"before":"81707181196428797048b972e4fffa1d7f7df70c","after":"33976243c7af549a30342534098bfe0d252268e4","ref":"refs/heads/lz/azure-experiment","pushedAt":"2024-09-18T16:19:28.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Experiment to see if Azure chat completion works OOTB","shortMessageHtmlLink":"Experiment to see if Azure chat completion works OOTB"}},{"before":null,"after":"81707181196428797048b972e4fffa1d7f7df70c","ref":"refs/heads/lz/azure-experiment","pushedAt":"2024-09-18T16:18:57.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Do not json deserialize whisper response for 'text' responseFormat (#40)\n\nIf a customer specifies that the whisper response format is text, then we don't want to json deserialize the http response body from OpenAI.\r\n\r\nBefore this change, a request format like the following would raise on the `let response = try` line:\r\n\r\n```swift\r\nlet requestBody = OpenAICreateTranscriptionRequestBody(\r\n file: try Data(contentsOf: url),\r\n model: \"whisper-1\",\r\n responseFormat: \"text\"\r\n)\r\nlet response = try await openAIService.createTranscriptionRequest(body: requestBody)\r\n```\r\n\r\nThis patch skips the use of Decodable and instead instantiates the `OpenAICreateTranscriptionResponseBody` directly using the http response body.\r\n\r\nThanks to Pierre for reporting!","shortMessageHtmlLink":"Do not json deserialize whisper response for 'text' responseFormat (#40)"}},{"before":"e37d46eb139f0dc2fe72c2ee2cdd1491a0290c01","after":"4fd461fbbf19176151632a62cf8a19fd1c6b175f","ref":"refs/heads/lz/add-fal-training-workflow","pushedAt":"2024-09-18T16:09:51.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Return to me","shortMessageHtmlLink":"Return to me"}},{"before":"ff6c90d904d000fed8d08bc2a463d3acc5c5cb5e","after":"81707181196428797048b972e4fffa1d7f7df70c","ref":"refs/heads/main","pushedAt":"2024-09-17T06:27:27.000Z","pushType":"pr_merge","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Do not json deserialize whisper response for 'text' responseFormat (#40)\n\nIf a customer specifies that the whisper response format is text, then we don't want to json deserialize the http response body from OpenAI.\r\n\r\nBefore this change, a request format like the following would raise on the `let response = try` line:\r\n\r\n```swift\r\nlet requestBody = OpenAICreateTranscriptionRequestBody(\r\n file: try Data(contentsOf: url),\r\n model: \"whisper-1\",\r\n responseFormat: \"text\"\r\n)\r\nlet response = try await openAIService.createTranscriptionRequest(body: requestBody)\r\n```\r\n\r\nThis patch skips the use of Decodable and instead instantiates the `OpenAICreateTranscriptionResponseBody` directly using the http response body.\r\n\r\nThanks to Pierre for reporting!","shortMessageHtmlLink":"Do not json deserialize whisper response for 'text' responseFormat (#40)"}},{"before":null,"after":"9775a01faf975d3a2ab3f84eae5b0b5d17724616","ref":"refs/heads/lz/bug-fix-whisper-response-format","pushedAt":"2024-09-17T06:19:58.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Do not json deserialize whisper response for 'text' responseFormat","shortMessageHtmlLink":"Do not json deserialize whisper response for 'text' responseFormat"}},{"before":"16ba66dd81586553b50cf79c38c7b80a10c43215","after":"e37d46eb139f0dc2fe72c2ee2cdd1491a0290c01","ref":"refs/heads/lz/add-fal-training-workflow","pushedAt":"2024-09-16T23:24:35.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Training not working","shortMessageHtmlLink":"Training not working"}},{"before":"1d17583feb8dfb7696bda833af779c4ab356a2c9","after":"16ba66dd81586553b50cf79c38c7b80a10c43215","ref":"refs/heads/lz/add-fal-training-workflow","pushedAt":"2024-09-16T17:52:01.000Z","pushType":"push","commitsCount":2,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Return to me","shortMessageHtmlLink":"Return to me"}},{"before":"1d17583feb8dfb7696bda833af779c4ab356a2c9","after":"ff6c90d904d000fed8d08bc2a463d3acc5c5cb5e","ref":"refs/heads/main","pushedAt":"2024-09-16T15:27:07.000Z","pushType":"pr_merge","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add detail param to OpenAI chat completion (#39)\n\nThis change adds the `detail` parameter to chat completions, which has three options, low, high, or auto. \r\n\r\nThis gives you control when using Vision (available for GPT-4o, GPT-4o mini, and GPT-4 Turbo) over how the model processes the image and generates its textual understanding. By default, the model will use the auto setting which will look at the image input size and decide if it should use the low or high setting.\r\n\r\n`low` will enable the \"low res\" mode. The model will receive a low-res 512px x 512px version of the image, and represent the image with a budget of 85 tokens. This allows the API to return faster responses and consume fewer input tokens for use cases that do not require high detail.\r\n\r\n`high` will enable \"high res\" mode, which first allows the model to first see the low res image (using 85 tokens) and then creates detailed crops using 170 tokens for each 512px x 512px tile.","shortMessageHtmlLink":"Add detail param to OpenAI chat completion (#39)"}},{"before":"310db4c645d291db522d974c17470313b909587b","after":"b05067d5dac15af8f75f40d7970139248713e25b","ref":"refs/heads/add-low-res-param-openai","pushedAt":"2024-09-16T15:25:58.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Remove whitespace","shortMessageHtmlLink":"Remove whitespace"}},{"before":"c48ed49db21e85a4c4d8f658129179e3b528c365","after":"310db4c645d291db522d974c17470313b909587b","ref":"refs/heads/add-low-res-param-openai","pushedAt":"2024-09-16T15:09:03.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"toddham","name":"Todd Hamilton","path":"/toddham","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/180131?s=80&v=4"},"commit":{"message":"Make detail optional","shortMessageHtmlLink":"Make detail optional"}},{"before":null,"after":"1d17583feb8dfb7696bda833af779c4ab356a2c9","ref":"refs/heads/lz/add-fal-training-workflow","pushedAt":"2024-09-15T20:24:06.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add support for Fal.ai (#38)\n\n- Added support for creating inferences, polling for completion, and then fetching the result.\r\n- Added a convenience method to generate a `fal-ai/fast-sdxl` image, serving as a template for more models.\r\n- Added example to the readme for generating an image with `fal-ai/fast-sdxl`","shortMessageHtmlLink":"Add support for Fal.ai (#38)"}},{"before":"e1cf5ae9719e519577acaebdea34b57aeeaa9f35","after":"1d17583feb8dfb7696bda833af779c4ab356a2c9","ref":"refs/heads/main","pushedAt":"2024-09-15T06:14:22.000Z","pushType":"pr_merge","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add support for Fal.ai (#38)\n\n- Added support for creating inferences, polling for completion, and then fetching the result.\r\n- Added a convenience method to generate a `fal-ai/fast-sdxl` image, serving as a template for more models.\r\n- Added example to the readme for generating an image with `fal-ai/fast-sdxl`","shortMessageHtmlLink":"Add support for Fal.ai (#38)"}},{"before":"9c641015ac15f3defd98492ce00176559aa67b25","after":"2b5626a502fc8ca50eb52af4915ee598a9209666","ref":"refs/heads/lz/add-fal-support","pushedAt":"2024-09-15T06:08:23.000Z","pushType":"force_push","commitsCount":0,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add support for Fal.ai","shortMessageHtmlLink":"Add support for Fal.ai"}},{"before":"3dfd4e4dce38657124c0b645c9f8300b5d15c937","after":"9c641015ac15f3defd98492ce00176559aa67b25","ref":"refs/heads/lz/add-fal-support","pushedAt":"2024-09-15T06:00:26.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add example","shortMessageHtmlLink":"Add example"}},{"before":"2ca91571411ba5837f9a2a40254f6381e6fc7fdf","after":"3dfd4e4dce38657124c0b645c9f8300b5d15c937","ref":"refs/heads/lz/add-fal-support","pushedAt":"2024-09-15T05:57:25.000Z","pushType":"force_push","commitsCount":0,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Add support for Fal.ai","shortMessageHtmlLink":"Add support for Fal.ai"}},{"before":"3530f7de252662ed618fef3d7b09a55eeb8bb23e","after":"2ca91571411ba5837f9a2a40254f6381e6fc7fdf","ref":"refs/heads/lz/add-fal-support","pushedAt":"2024-09-15T01:52:15.000Z","pushType":"force_push","commitsCount":0,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Patch","shortMessageHtmlLink":"Patch"}},{"before":"eeaf701711703d783755b493122f620a07832b3b","after":"e1cf5ae9719e519577acaebdea34b57aeeaa9f35","ref":"refs/heads/main","pushedAt":"2024-09-15T01:49:16.000Z","pushType":"pr_merge","commitsCount":1,"pusher":{"login":"lzell","name":"Lou Zell","path":"/lzell","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/35940?s=80&v=4"},"commit":{"message":"Consistent use of serialization and deserialization (#37)\n\n- Added internal extension on Encodable for serialization helpers\r\n- Added internal extension on Decodable for deserialization helpers\r\n- Made request creation consistent throughout lib\r\n- Updated tests","shortMessageHtmlLink":"Consistent use of serialization and deserialization (#37)"}}],"hasNextPage":true,"hasPreviousPage":false,"activityType":"all","actor":null,"timePeriod":"all","sort":"DESC","perPage":30,"cursor":"djE6ks8AAAAEu3o45wA","startCursor":null,"endCursor":null}},"title":"Activity ยท lzell/AIProxySwift"}