{"payload":{"feedbackUrl":"https://github.com/orgs/community/discussions/53140","repo":{"id":767380208,"defaultBranch":"main","name":"llama_index","ownerLogin":"intel-analytics","currentUserCanPush":false,"isFork":true,"isEmpty":false,"createdAt":"2024-03-05T07:34:25.000Z","ownerAvatar":"https://avatars.githubusercontent.com/u/10941215?v=4","public":true,"private":false,"isOrgOwned":true},"refInfo":{"name":"","listCacheKey":"v0:1716278331.0","currentOid":""},"activityList":{"items":[{"before":"8d175082626fb5da9e5f4b1fed39e5487d5afca1","after":"2aeb8759781e87a36d753a13c016d2c3d7deada7","ref":"refs/heads/ipex-llm-llm-gpu","pushedAt":"2024-05-23T08:37:26.000Z","pushType":"pr_merge","commitsCount":20,"pusher":{"login":"ivy-lv11","name":null,"path":"/ivy-lv11","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/59141989?s=80&v=4"},"commit":{"message":"Merge pull request #24 from ivy-lv11/ipex-llm-llm-gpu\n\nAdd IPEX-LLM with GPU","shortMessageHtmlLink":"Merge pull request #24 from ivy-lv11/ipex-llm-llm-gpu"}},{"before":"d7fc6c4fc110f64c5f35d234749d2c39d1ed955d","after":"f6f2df178f4018b4b72a5e9213c080d462b49263","ref":"refs/heads/ipex-llm-embedding-device-fix","pushedAt":"2024-05-23T01:59:10.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Update based on comments","shortMessageHtmlLink":"Update based on comments"}},{"before":"8f1e97874f016573e5b4a36c215d9547c8f3526d","after":"d7fc6c4fc110f64c5f35d234749d2c39d1ed955d","ref":"refs/heads/ipex-llm-embedding-device-fix","pushedAt":"2024-05-22T08:04:30.000Z","pushType":"pr_merge","commitsCount":5,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Merge pull request #26 from Oscilloscope98/embedding-device-logic-update\n\n[Community] Update optimization logic regarding device for `llama-index-embeddings-ipex-llm`","shortMessageHtmlLink":"Merge pull request #26 from Oscilloscope98/embedding-device-logic-update"}},{"before":null,"after":"8f1e97874f016573e5b4a36c215d9547c8f3526d","ref":"refs/heads/ipex-llm-embedding-device-fix","pushedAt":"2024-05-21T07:58:51.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Implement local Nomic Embed with the inference_mode parameter (#13607)","shortMessageHtmlLink":"Implement local Nomic Embed with the inference_mode parameter (run-ll…"}},{"before":"8f1e97874f016573e5b4a36c215d9547c8f3526d","after":null,"ref":"refs/heads/ipex-llm-embedding-cpu-device-fix","pushedAt":"2024-05-21T07:58:24.000Z","pushType":"branch_deletion","commitsCount":0,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"}},{"before":null,"after":"8f1e97874f016573e5b4a36c215d9547c8f3526d","ref":"refs/heads/ipex-llm-embedding-cpu-device-fix","pushedAt":"2024-05-21T07:56:18.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Implement local Nomic Embed with the inference_mode parameter 
(#13607)","shortMessageHtmlLink":"Implement local Nomic Embed with the inference_mode parameter (run-ll…"}},{"before":"8d175082626fb5da9e5f4b1fed39e5487d5afca1","after":"8f1e97874f016573e5b4a36c215d9547c8f3526d","ref":"refs/heads/main","pushedAt":"2024-05-21T07:55:00.000Z","pushType":"push","commitsCount":7,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Implement local Nomic Embed with the inference_mode parameter (#13607)","shortMessageHtmlLink":"Implement local Nomic Embed with the inference_mode parameter (run-ll…"}},{"before":null,"after":"8d175082626fb5da9e5f4b1fed39e5487d5afca1","ref":"refs/heads/ipex-llm-llm-gpu","pushedAt":"2024-05-20T07:24:52.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Integration: Bump duckduckgo-search package (#13583)","shortMessageHtmlLink":"Integration: Bump duckduckgo-search package (run-llama#13583)"}},{"before":"767de070b231fb328b6c0640c2e002c9c7af0a83","after":"8d175082626fb5da9e5f4b1fed39e5487d5afca1","ref":"refs/heads/main","pushedAt":"2024-05-20T05:39:26.000Z","pushType":"push","commitsCount":16,"pusher":{"login":"ivy-lv11","name":null,"path":"/ivy-lv11","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/59141989?s=80&v=4"},"commit":{"message":"Integration: Bump duckduckgo-search package (#13583)","shortMessageHtmlLink":"Integration: Bump duckduckgo-search package (run-llama#13583)"}},{"before":"4c0a6172226214b92275e9e2c5eb0d21856d6863","after":"767de070b231fb328b6c0640c2e002c9c7af0a83","ref":"refs/heads/main","pushedAt":"2024-05-17T09:55:53.000Z","pushType":"push","commitsCount":16,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Allow querying AzureAISearch without non-null metadata field (#13531)","shortMessageHtmlLink":"Allow querying AzureAISearch without non-null metadata field (run-lla…"}},{"before":"4c0a6172226214b92275e9e2c5eb0d21856d6863","after":"53c8d47e4ef0227ec45e3f9dfc4ae091c5718b77","ref":"refs/heads/ipex-llm-embedding-gpu-dependency-fix","pushedAt":"2024-05-15T10:30:07.000Z","pushType":"pr_merge","commitsCount":4,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Merge pull request #21 from Oscilloscope98/adjust-pyproject-for-embedding-gpu\n\nResolve releasing error for `llama-index-embeddings-ipex-llm` with Intel GPU supports","shortMessageHtmlLink":"Merge pull request #21 from Oscilloscope98/adjust-pyproject-for-embed…"}},{"before":"2f6e48424831d41ef08d1344150e5a45a0f6b671","after":null,"ref":"refs/heads/main-backup","pushedAt":"2024-05-15T06:27:55.000Z","pushType":"branch_deletion","commitsCount":0,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"}},{"before":null,"after":"4c0a6172226214b92275e9e2c5eb0d21856d6863","ref":"refs/heads/ipex-llm-embedding-gpu-dependency-fix","pushedAt":"2024-05-15T06:18:05.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"Oscilloscope98","name":"Yuwen 
Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"chore: grammar fixes and add colab badge to nb (#13501)\n\n* fix annoying grammar errors\r\n\r\n* add colab badge","shortMessageHtmlLink":"chore: grammar fixes and add colab badge to nb (run-llama#13501)"}},{"before":"2f6e48424831d41ef08d1344150e5a45a0f6b671","after":"4c0a6172226214b92275e9e2c5eb0d21856d6863","ref":"refs/heads/main","pushedAt":"2024-05-15T06:14:11.000Z","pushType":"force_push","commitsCount":0,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"chore: grammar fixes and add colab badge to nb (#13501)\n\n* fix annoying grammar errors\r\n\r\n* add colab badge","shortMessageHtmlLink":"chore: grammar fixes and add colab badge to nb (run-llama#13501)"}},{"before":null,"after":"2f6e48424831d41ef08d1344150e5a45a0f6b671","ref":"refs/heads/main-backup","pushedAt":"2024-05-15T06:12:42.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Merge pull request #15 from shane-huang/load-lowbit\n\nadd load low-bit model support and update examples","shortMessageHtmlLink":"Merge pull request #15 from shane-huang/load-lowbit"}},{"before":"cee23c9da888b6bbd09a05e544686aa3c7ded689","after":"1a1bc929a76a9d92fb95213236a22130d3f9e8a3","ref":"refs/heads/ipex-llm-embedding-gpu","pushedAt":"2024-04-25T03:11:27.000Z","pushType":"pr_merge","commitsCount":12,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Merge pull request #19 from Oscilloscope98/initial-ipex-llm-embedding-gpu\n\nSupport llama-index-embeddings-ipex-llm for Intel GPUs","shortMessageHtmlLink":"Merge pull request #19 from Oscilloscope98/initial-ipex-llm-embedding…"}},{"before":null,"after":"cee23c9da888b6bbd09a05e544686aa3c7ded689","ref":"refs/heads/ipex-llm-embedding-gpu","pushedAt":"2024-04-24T02:23:40.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"[FEATURE] Support both Predibase SDK-v1 and SDK-v2 (#13066)","shortMessageHtmlLink":"[FEATURE] Support both Predibase SDK-v1 and SDK-v2 (run-llama#13066)"}},{"before":"a0d793aa07c8baf9683cf682f07b712f56971db5","after":"cee23c9da888b6bbd09a05e544686aa3c7ded689","ref":"refs/heads/bigdl","pushedAt":"2024-04-24T02:19:38.000Z","pushType":"push","commitsCount":585,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"[FEATURE] Support both Predibase SDK-v1 and SDK-v2 (#13066)","shortMessageHtmlLink":"[FEATURE] Support both Predibase SDK-v1 and SDK-v2 (run-llama#13066)"}},{"before":null,"after":"eae443bbd8f909cfe958f724312387c66c507863","ref":"refs/heads/load-lowbit2","pushedAt":"2024-04-12T06:19:45.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"shane-huang","name":"Shengsheng Huang","path":"/shane-huang","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/1995599?s=80&v=4"},"commit":{"message":"small 
fix","shortMessageHtmlLink":"small fix"}},{"before":"1ad3b797ec738d10feb5266bd3e258d534b0ed7e","after":"db87cde8cb544bb44abc41ac40654af4b4e55674","ref":"refs/heads/ipex-llm-embedding","pushedAt":"2024-04-12T02:43:10.000Z","pushType":"pr_merge","commitsCount":2,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Merge pull request #18 from Oscilloscope98/embedding-dependency-further-update\n\nEmbedding dependency further update: loosen numpy and mpmath","shortMessageHtmlLink":"Merge pull request #18 from Oscilloscope98/embedding-dependency-furth…"}},{"before":"fc7ecb1a3e8e66e9e6b9f65e560db0aba22c3458","after":"1ad3b797ec738d10feb5266bd3e258d534b0ed7e","ref":"refs/heads/ipex-llm-embedding","pushedAt":"2024-04-11T15:59:14.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"logan-markewich","name":"Logan","path":"/logan-markewich","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/22285038?s=80&v=4"},"commit":{"message":"build files","shortMessageHtmlLink":"build files"}},{"before":"2b77f89775840d6b796bcc693f7593d2aebc5fec","after":"f3b136ffc4ef536900991b7adcbd9c1f89d7dfc2","ref":"refs/heads/load-lowbit-new","pushedAt":"2024-04-11T07:45:02.000Z","pushType":"pr_merge","commitsCount":6,"pusher":{"login":"shane-huang","name":"Shengsheng Huang","path":"/shane-huang","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/1995599?s=80&v=4"},"commit":{"message":"Merge pull request #17 from shane-huang/load-lowbit2\n\nload low bit model support","shortMessageHtmlLink":"Merge pull request #17 from shane-huang/load-lowbit2"}},{"before":null,"after":"2b77f89775840d6b796bcc693f7593d2aebc5fec","ref":"refs/heads/load-lowbit-new","pushedAt":"2024-04-11T07:39:33.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"shane-huang","name":"Shengsheng Huang","path":"/shane-huang","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/1995599?s=80&v=4"},"commit":{"message":"[#12713] Fix documentation for Postgres classes (#12714)\n\n[#12713] Updating documentation for Postgres classes to remove mongodb references","shortMessageHtmlLink":"[run-llama#12713] Fix documentation for Postgres classes (run-llama#1…"}},{"before":"2f2d5a4735dd82f8acd2f630c745758e92202be2","after":"2f6e48424831d41ef08d1344150e5a45a0f6b671","ref":"refs/heads/main","pushedAt":"2024-04-11T07:26:18.000Z","pushType":"pr_merge","commitsCount":6,"pusher":{"login":"shane-huang","name":"Shengsheng Huang","path":"/shane-huang","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/1995599?s=80&v=4"},"commit":{"message":"Merge pull request #15 from shane-huang/load-lowbit\n\nadd load low-bit model support and update examples","shortMessageHtmlLink":"Merge pull request #15 from shane-huang/load-lowbit"}},{"before":"8e54eb89103439cdd21281ad1035ee5a9b40d478","after":"fc7ecb1a3e8e66e9e6b9f65e560db0aba22c3458","ref":"refs/heads/ipex-llm-embedding","pushedAt":"2024-04-11T07:18:11.000Z","pushType":"pr_merge","commitsCount":2,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Merge pull request #14 from Oscilloscope98/small-embedding-dependency-update\n\nUpdate dependency regarding ipex-llm version","shortMessageHtmlLink":"Merge pull request #14 from 
Oscilloscope98/small-embedding-dependency…"}},{"before":"7431b70ec1420075051b887b82810f46dbe7a042","after":"8e54eb89103439cdd21281ad1035ee5a9b40d478","ref":"refs/heads/ipex-llm-embedding","pushedAt":"2024-04-11T06:57:53.000Z","pushType":"pr_merge","commitsCount":3,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Merge pull request #13 from Oscilloscope98/adaption-note\n\nAdd adaption note","shortMessageHtmlLink":"Merge pull request #13 from Oscilloscope98/adaption-note"}},{"before":"2f2d5a4735dd82f8acd2f630c745758e92202be2","after":"7431b70ec1420075051b887b82810f46dbe7a042","ref":"refs/heads/ipex-llm-embedding","pushedAt":"2024-04-11T06:37:33.000Z","pushType":"pr_merge","commitsCount":14,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Merge pull request #12 from Oscilloscope98/initial-integration-ipex-llm-embedding\n\nAdd initial ipex-llm embedding integrations support for BGE models on CPU","shortMessageHtmlLink":"Merge pull request #12 from Oscilloscope98/initial-integration-ipex-l…"}},{"before":"f26bbf35f2eab1397531377d5ffbe6f8aec30e5e","after":"ace9e73c9ef1800df2f6e31579d7d3de3c82edf4","ref":"refs/heads/load-lowbit","pushedAt":"2024-04-11T03:46:46.000Z","pushType":"pr_merge","commitsCount":4,"pusher":{"login":"shane-huang","name":"Shengsheng Huang","path":"/shane-huang","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/1995599?s=80&v=4"},"commit":{"message":"Merge pull request #11 from ivy-lv11/load-lowbit\n\nAdd from_model_id and from_model_id_low_bit","shortMessageHtmlLink":"Merge pull request #11 from ivy-lv11/load-lowbit"}},{"before":"ff73754c5b68e9f4e49b1d55bc70e10d18462bce","after":"2f2d5a4735dd82f8acd2f630c745758e92202be2","ref":"refs/heads/ipex-llm-embedding","pushedAt":"2024-04-09T06:36:57.000Z","pushType":"push","commitsCount":145,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Add missing node postprocessor in CondensePlusContextChatEngine async mode. (#12663)\n\nfix missing node postprocessor in CondensePlusContextChatEngine async","shortMessageHtmlLink":"Add missing node postprocessor in CondensePlusContextChatEngine async…"}},{"before":"ff73754c5b68e9f4e49b1d55bc70e10d18462bce","after":"2f2d5a4735dd82f8acd2f630c745758e92202be2","ref":"refs/heads/main","pushedAt":"2024-04-09T06:28:19.000Z","pushType":"push","commitsCount":145,"pusher":{"login":"Oscilloscope98","name":"Yuwen Hu","path":"/Oscilloscope98","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/54161268?s=80&v=4"},"commit":{"message":"Add missing node postprocessor in CondensePlusContextChatEngine async mode. (#12663)\n\nfix missing node postprocessor in CondensePlusContextChatEngine async","shortMessageHtmlLink":"Add missing node postprocessor in CondensePlusContextChatEngine async…"}}],"hasNextPage":true,"hasPreviousPage":false,"activityType":"all","actor":null,"timePeriod":"all","sort":"DESC","perPage":30,"cursor":"djE6ks8AAAAEUcVilgA","startCursor":null,"endCursor":null}},"title":"Activity · intel-analytics/llama_index"}