{"payload":{"pageCount":4,"repositories":[{"type":"Public","name":"transformers","owner":"huggingface","isFork":false,"description":"🤗 Transformers: State-of-the-art Machine Learning for Pytorch, TensorFlow, and JAX.","allTopics":["python","seq2seq","flax","language-models","nlp-library","hacktoberfest","jax","pytorch-transformers","model-hub","nlp","machine-learning","natural-language-processing","deep-learning","tensorflow","pytorch","transformer","speech-recognition","pretrained-models","language-model","bert"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":263,"issueCount":929,"starsCount":129353,"forksCount":25647,"license":"Apache License 2.0","participation":[68,70,53,65,67,64,75,48,64,36,50,63,62,55,64,74,68,62,49,43,81,62,38,2,29,58,51,52,50,43,52,51,55,63,60,58,48,56,59,72,72,65,55,59,65,66,66,43,47,37,43,53],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T16:34:47.071Z"}},{"type":"Public","name":"huggingface_hub","owner":"huggingface","isFork":false,"description":"The official Python client for the Huggingface Hub.","allTopics":["machine-learning","natural-language-processing","deep-learning","models","pytorch","pretrained-models","hacktoberfest","model-hub"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":12,"issueCount":120,"starsCount":1845,"forksCount":475,"license":"Apache License 2.0","participation":[3,3,1,4,10,4,14,28,10,13,2,9,15,7,10,16,7,7,22,11,3,13,11,0,9,11,8,4,1,6,4,10,14,6,8,12,8,10,14,15,18,9,0,1,11,12,11,12,1,8,7,11],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T16:00:39.391Z"}},{"type":"Public","name":"lerobot","owner":"huggingface","isFork":false,"description":"🤗 LeRobot: End-to-end Learning for Real-World Robotics in Pytorch","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":26,"issueCount":25,"starsCount":4422,"forksCount":364,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,1,5,22,41,65,37,76,44,56,66,45,18,21,16,13,5,16,8,8,5,2,4,7],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T16:11:00.322Z"}},{"type":"Public","name":"cosmopedia","owner":"huggingface","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":3,"issueCount":5,"starsCount":352,"forksCount":30,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11,30,0,1,0,2,0,0,0,1,2,0,0,0,0,4,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T14:16:12.046Z"}},{"type":"Public","name":"text-generation-inference","owner":"huggingface","isFork":false,"description":"Large Language Model Text Generation Inference","allTopics":["nlp","bloom","deep-learning","inference","pytorch","falcon","transformer","gpt","starcoder"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":18,"issueCount":105,"starsCount":8403,"forksCount":955,"license":"Apache License 2.0","participation":[18,18,8,18,8,5,7,12,3,4,19,14,2,8,1,0,4,3,8,5,6,15,5,0,0,6,4,21,10,8,12,8,12,0,5,11,4,7,17,8,18,20,2,21,15,16,24,13,9,34,28,11],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T15:58:23.803Z"}},{"type":"Public","name":"peft","owner":"huggingface","isFork":false,"description":"🤗 PEFT: State-of-the-art Parameter-Efficient 
Fine-Tuning.","allTopics":["python","adapter","transformers","pytorch","lora","diffusion","parameter-efficient-learning","llm"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":12,"issueCount":18,"starsCount":15053,"forksCount":1444,"license":"Apache License 2.0","participation":[11,5,5,6,8,9,8,7,9,6,10,12,7,1,7,9,18,16,10,17,21,17,5,3,5,19,6,3,15,19,18,12,5,12,9,7,4,8,9,4,14,5,6,11,9,5,12,12,4,8,7,4],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T14:13:10.668Z"}},{"type":"Public","name":"trl","owner":"huggingface","isFork":false,"description":"Train transformer language models with reinforcement learning.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":18,"issueCount":54,"starsCount":8778,"forksCount":1079,"license":"Apache License 2.0","participation":[13,9,6,6,8,8,14,10,21,8,7,7,9,9,7,17,8,7,4,10,12,4,17,8,10,19,11,8,15,1,10,3,12,5,7,21,4,11,13,16,5,10,3,3,12,6,13,10,11,7,9,7],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T16:29:13.616Z"}},{"type":"Public","name":"lighteval","owner":"huggingface","isFork":false,"description":"LightEval is a lightweight LLM evaluation suite that Hugging Face has been using internally with the recently released LLM data processing library datatrove and LLM training library nanotron.","allTopics":["evaluation","evaluation-metrics","evaluation-framework","huggingface"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":14,"issueCount":47,"starsCount":479,"forksCount":58,"license":"MIT License","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,10,16,4,8,16,9,3,10,6,4,4,4,2,6,3,0,1,0,0,0,0,0,7,6],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T14:40:50.601Z"}},{"type":"Public","name":"optimum","owner":"huggingface","isFork":false,"description":"🚀 Accelerate training and inference of 🤗 Transformers and 🤗 Diffusers with easy to use hardware optimization tools","allTopics":["training","optimization","intel","transformers","tflite","onnxruntime","graphcore","habana","inference","pytorch","quantization","onnx"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":90,"issueCount":296,"starsCount":2324,"forksCount":408,"license":"Apache License 2.0","participation":[14,13,12,9,3,14,13,14,5,5,2,7,7,9,8,14,4,6,1,2,11,15,0,1,3,10,6,9,1,9,10,8,9,0,4,9,2,7,7,1,7,1,2,1,6,7,5,1,2,9,9,3],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T13:30:10.061Z"}},{"type":"Public","name":"tgi-gaudi","owner":"huggingface","isFork":true,"description":"Large Language Model Text Generation Inference on Habana Gaudi","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":6,"issueCount":5,"starsCount":19,"forksCount":955,"license":"Apache License 2.0","participation":[18,18,8,18,8,5,7,12,3,4,19,14,2,8,1,0,4,3,8,5,11,15,7,0,1,10,11,22,10,8,16,18,27,2,10,13,9,11,21,8,23,18,0,4,2,2,2,3,2,0,11,1],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T12:53:25.349Z"}},{"type":"Public","name":"accelerate","owner":"huggingface","isFork":false,"description":"🚀 A simple way to launch, train, and use PyTorch models on almost any device and distributed configuration, automatic mixed precision (including fp8), and easy-to-configure FSDP and DeepSpeed support","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":11,"issueCount":106,"starsCount":7390,"forksCount":881,"license":"Apache License 
2.0","participation":[14,10,10,10,10,13,12,9,16,2,5,5,11,4,10,10,15,19,3,8,14,5,6,5,7,15,10,6,9,14,13,9,20,11,8,10,9,13,15,8,12,14,10,6,7,6,10,12,3,4,12,8],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T12:40:53.930Z"}},{"type":"Public","name":"diffusers","owner":"huggingface","isFork":false,"description":"🤗 Diffusers: State-of-the-art diffusion models for image and audio generation in PyTorch and FLAX.","allTopics":["deep-learning","pytorch","image-generation","flax","hacktoberfest","diffusion","text2image","image2image","jax","score-based-generative-modeling","stable-diffusion","stable-diffusion-diffusers","latent-diffusion-models"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":146,"issueCount":378,"starsCount":24118,"forksCount":4978,"license":"Apache License 2.0","participation":[36,45,47,27,24,59,38,27,49,42,43,31,21,30,34,36,45,45,31,47,29,30,40,38,40,41,31,28,34,44,35,27,48,39,36,31,35,18,24,22,25,27,28,24,19,29,26,30,39,37,26,12],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T12:28:07.303Z"}},{"type":"Public","name":"optimum-quanto","owner":"huggingface","isFork":false,"description":"A pytorch quantization backend for optimum","allTopics":["pytorch","quantization","optimum"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":4,"issueCount":16,"starsCount":660,"forksCount":37,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,8,34,8,23,23,0,0,8,4,26,56,37,19,1,9,3,11,4,13,6,17,35,11,15,17,13,2,9,11,13,1,6,5,7,4,18,0,6,0,15,15,5],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T16:35:40.596Z"}},{"type":"Public","name":"safetensors","owner":"huggingface","isFork":false,"description":"Simple, safe way to store and distribute tensors","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":7,"issueCount":12,"starsCount":2620,"forksCount":167,"license":"Apache License 2.0","participation":[0,2,5,6,7,9,1,3,2,1,0,1,0,0,0,0,0,7,3,0,1,0,0,0,5,1,6,2,0,0,1,0,0,0,0,0,0,0,3,0,1,0,0,0,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T09:49:22.124Z"}},{"type":"Public","name":"nanotron","owner":"huggingface","isFork":false,"description":"Minimalistic large language model 3D-parallelism training","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":30,"issueCount":35,"starsCount":958,"forksCount":87,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,2,0,2,1,0,0,0,0,0,0,0,0,0,0,7,24,8,19,24,103,85,65,69,92,29,55,37,30,19,14,34,67,32,41,13,15,9,2,3,3,0,0,0,4,1],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T11:22:27.153Z"}},{"type":"Public","name":"optimum-habana","owner":"huggingface","isFork":false,"description":"Easy and lightning fast training of 🤗 Transformers on Habana Gaudi processor (HPU)","allTopics":["transformers","bert","fine-tuning","hpu","habana"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":45,"issueCount":16,"starsCount":127,"forksCount":150,"license":"Apache License 2.0","participation":[9,10,2,11,13,14,14,5,16,15,15,11,9,7,18,11,11,14,18,20,9,13,5,4,7,6,11,14,11,8,10,12,11,10,24,8,14,8,10,10,24,22,10,8,9,9,34,16,2,4,8,22],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T15:35:11.959Z"}},{"type":"Public","name":"dataset-viewer","owner":"huggingface","isFork":false,"description":"Lightweight web API for visualizing and exploring any dataset - computer vision, speech, text, and tabular - stored on the Hugging Face 
Hub","allTopics":["nlp","data","machine-learning","api-rest","datasets","huggingface"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":8,"issueCount":132,"starsCount":647,"forksCount":65,"license":"Apache License 2.0","participation":[21,25,51,19,22,13,18,21,15,22,38,32,24,17,23,16,19,18,10,7,16,14,7,0,13,18,15,9,18,23,14,24,24,12,23,10,13,16,24,13,9,10,11,30,16,13,14,11,21,16,4,12],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-16T06:20:29.708Z"}},{"type":"Public","name":"datatrove","owner":"huggingface","isFork":false,"description":"Freeing data processing from scripting madness by providing a set of platform-agnostic customizable pipeline processing blocks.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":6,"issueCount":35,"starsCount":1781,"forksCount":109,"license":"Apache License 2.0","participation":[5,14,4,9,3,0,4,4,2,0,0,3,4,16,1,10,3,23,22,11,6,11,3,1,0,1,9,7,11,7,7,7,6,7,1,9,0,3,5,12,2,5,7,4,13,3,3,7,2,3,9,2],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-15T16:57:49.220Z"}},{"type":"Public","name":"datasets","owner":"huggingface","isFork":false,"description":"🤗 The largest hub of ready-to-use datasets for ML models with fast, easy-to-use and efficient data manipulation tools","allTopics":["machine-learning","natural-language-processing","computer-vision","deep-learning","tensorflow","numpy","speech","pandas","datasets","hacktoberfest","nlp","pytorch"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":83,"issueCount":640,"starsCount":18764,"forksCount":2590,"license":"Apache License 2.0","participation":[9,17,3,3,8,6,9,9,5,3,6,1,7,14,4,7,2,14,8,11,5,11,15,4,1,6,1,11,10,10,5,3,20,5,6,6,1,5,9,9,2,7,3,8,7,8,13,8,5,15,5,6],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-15T15:59:57.441Z"}},{"type":"Public","name":"optimum-benchmark","owner":"huggingface","isFork":false,"description":"🏋️ A unified multi-backend utility for benchmarking Transformers, Timm, PEFT, Diffusers and Sentence-Transformers with full support of Optimum's hardware optimizations & quantization schemes.","allTopics":["benchmark","pytorch","openvino","onnxruntime","text-generation-inference","neural-compressor","tensorrt-llm"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":8,"starsCount":216,"forksCount":34,"license":"Apache License 2.0","participation":[6,6,19,75,22,39,53,31,13,10,0,2,1,25,7,12,1,7,4,8,7,0,0,0,3,5,2,0,0,1,5,9,5,1,9,5,4,4,6,2,1,4,8,18,1,0,0,1,0,2,5,2],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-15T15:15:44.523Z"}},{"type":"Public","name":"bench_cluster","owner":"huggingface","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":1,"forksCount":0,"license":null,"participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27,19,20,33],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-15T12:24:01.227Z"}},{"type":"Public","name":"competitions","owner":"huggingface","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":109,"forksCount":9,"license":"Apache License 
2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17,20,4,10,6,11,1,13,27,0,17,4,1,0,7,2,0,0,0,0,3,0,0,0,0,4,0,5],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-15T11:07:24.708Z"}},{"type":"Public","name":"pytorch-image-models","owner":"huggingface","isFork":false,"description":"The largest collection of PyTorch image encoders / backbones. Including train, eval, inference, export scripts, and pretrained weights -- ResNet, ResNeXT, EfficientNet, NFNet, Vision Transformer (ViT), MobileNetV4, MobileNet-V3 & V2, RegNet, DPN, CSPNet, Swin Transformer, MaxViT, CoAtNet, ConvNeXt, and more","allTopics":["pytorch","imagenet","image-classification","resnet","pretrained-models","mixnet","pretrained-weights","distributed-training","dual-path-networks","mobilenet-v2","mobile-deep-learning","mobilenetv3","efficientnet","augmix","randaugment","nfnets","normalization-free-training","vision-transformer-models","convnext","maxvit"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":27,"issueCount":53,"starsCount":30793,"forksCount":4642,"license":"Apache License 2.0","participation":[1,10,19,11,22,18,18,0,2,4,8,5,2,21,6,10,2,12,17,1,8,2,1,5,3,4,3,9,5,16,8,0,0,2,7,9,0,3,17,4,1,19,17,20,14,5,26,21,10,3,2,2],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-15T04:35:25.504Z"}},{"type":"Public","name":"distil-whisper","owner":"huggingface","isFork":false,"description":"Distilled variant of Whisper for speech recognition. 6x faster, 50% smaller, within 1% word error rate.","allTopics":["audio","speech-recognition","whisper"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":6,"issueCount":53,"starsCount":3358,"forksCount":244,"license":"MIT License","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,38,1,37,0,0,7,1,2,0,0,0,3,2,4,0,0,0,0,0,0,1,27,14,2,3,6,26,0,1,1,2,6,1,0,4,0,2],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-12T13:57:27.387Z"}},{"type":"Public","name":"optimum-tpu","owner":"huggingface","isFork":false,"description":"Google TPU optimizations for transformers models","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":4,"starsCount":45,"forksCount":9,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,2,1,2,11,3,1,4,7,0,3,1,3,4,4,2,2,3,5],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-12T12:04:32.665Z"}},{"type":"Public","name":"controlnet_aux","owner":"huggingface","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":8,"issueCount":38,"starsCount":347,"forksCount":74,"license":"Apache License 2.0","participation":[0,0,4,0,4,1,7,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,3,0,1,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-12T03:09:00.850Z"}},{"type":"Public","name":"parler-tts","owner":"huggingface","isFork":false,"description":"Inference and training library for high-quality TTS models.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":7,"issueCount":43,"starsCount":2883,"forksCount":294,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,17,10,8,4,2,1,31,33,0,8,4,4,8,11,2,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-11T17:15:27.082Z"}},{"type":"Public","name":"llm-swarm","owner":"huggingface","isFork":false,"description":"Manage scalable 
open LLM inference endpoints in Slurm clusters","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":194,"forksCount":16,"license":"MIT License","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15,5,20,0,0,0,2,0,0,9,3,36,17,29,1,0,2,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,2],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-11T16:39:23.498Z"}},{"type":"Public","name":"evaluate","owner":"huggingface","isFork":false,"description":"🤗 Evaluate: A library for easily evaluating machine learning models and datasets.","allTopics":["evaluation","machine-learning"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":55,"issueCount":143,"starsCount":1900,"forksCount":235,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,1,0,0,0,1,0,2,1,0,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,1,0,0,0,0,1,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-11T09:24:36.555Z"}},{"type":"Public","name":"api-inference-community","owner":"huggingface","isFork":false,"description":"","allTopics":["hacktoberfest"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":42,"issueCount":33,"starsCount":151,"forksCount":58,"license":"Apache License 2.0","participation":[1,6,8,1,0,1,0,0,0,0,1,3,2,1,2,0,3,1,0,3,7,0,1,3,0,2,0,0,4,1,0,0,5,0,1,1,2,0,0,0,0,0,0,0,0,0,0,1,4,1,2,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-10T14:42:12.206Z"}}],"repositoryCount":114,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"huggingface repositories"}
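The listing above corresponds to the JSON payload embedded in GitHub's repositories page for the huggingface organization. As a minimal sketch, assuming that payload has been saved to a local file (the payload.json file name is hypothetical; the field names are the ones used in the payload itself), the same per-repository summary can be regenerated like this:

```python
import json

# Load a saved copy of the page payload (file name is an assumption).
with open("payload.json", encoding="utf-8") as f:
    data = json.load(f)["payload"]

# Page-level summary: repositories on this page vs. the organization total.
print(f'{len(data["repositories"])} of {data["repositoryCount"]} repositories '
      f'({data["pageCount"]} pages in total)')

# Per-repository summary. The "participation" arrays (52 weekly activity counts
# backing the page's activity sparkline) are intentionally left out.
for repo in data["repositories"]:
    name = repo["name"] + (" (fork)" if repo["isFork"] else "")
    desc = repo["description"] or "(no description)"
    license_name = repo["license"] or "no license"
    pushed = repo["lastUpdated"]["timestamp"][:10]  # keep the date, drop the time
    print(f"{name}: {desc}")
    print(f'    {repo["starsCount"]:,} stars · {repo["forksCount"]:,} forks · '
          f'{repo["pullRequestCount"]} PRs · {repo["issueCount"]} issues · '
          f'{license_name} · last pushed {pushed}')
```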