[{"data":1,"prerenderedAt":1217},["ShallowReactive",2],{"/en-us/blog/":3,"navigation-en-us":21,"banner-en-us":433,"footer-en-us":446,"blogCategories-en-us":658,"relatedBlogPosts-en-us":771,"maineFeaturedPost-en-us":1164,"recentFeaturedPosts-en-us":1169,"recentPosts-en-us":1185},{"_path":4,"_dir":5,"_draft":6,"_partial":6,"_locale":7,"seo":8,"content":11,"config":13,"_id":15,"_type":16,"title":7,"_source":17,"_file":18,"_stem":19,"_extension":20},"/en-us/blog","en-us",false,"",{"title":9,"description":10},"Blog | GitLab","Tutorials, product information, expert insights, and more from GitLab to help DevSecOps teams build, test, and deploy secure software faster.",{"title":12},"GitLab Blog",{"template":14},"BlogHome","content:en-us:blog:index.yml","yaml","content","en-us/blog/index.yml","en-us/blog/index","yml",{"_path":22,"_dir":5,"_draft":6,"_partial":6,"_locale":7,"data":23,"_id":429,"_type":16,"title":430,"_source":17,"_file":431,"_stem":432,"_extension":20},"/shared/en-us/main-navigation",{"logo":24,"freeTrial":29,"sales":34,"login":39,"items":44,"search":375,"minimal":406,"duo":420},{"config":25},{"href":26,"dataGaName":27,"dataGaLocation":28},"/","gitlab logo","header",{"text":30,"config":31},"Get free trial",{"href":32,"dataGaName":33,"dataGaLocation":28},"https://gitlab.com/-/trial_registrations/new?glm_source=about.gitlab.com&glm_content=default-saas-trial/","free trial",{"text":35,"config":36},"Talk to sales",{"href":37,"dataGaName":38,"dataGaLocation":28},"/sales/","sales",{"text":40,"config":41},"Sign in",{"href":42,"dataGaName":43,"dataGaLocation":28},"https://gitlab.com/users/sign_in/","sign in",[45,89,185,190,296,356],{"text":46,"config":47,"cards":49,"footer":72},"Platform",{"dataNavLevelOne":48},"platform",[50,56,64],{"title":46,"description":51,"link":52},"The most comprehensive AI-powered DevSecOps Platform",{"text":53,"config":54},"Explore our 
Platform",{"href":55,"dataGaName":48,"dataGaLocation":28},"/platform/",{"title":57,"description":58,"link":59},"GitLab Duo (AI)","Build software faster with AI at every stage of development",{"text":60,"config":61},"Meet GitLab Duo",{"href":62,"dataGaName":63,"dataGaLocation":28},"/gitlab-duo/","gitlab duo ai",{"title":65,"description":66,"link":67},"Why GitLab","10 reasons why Enterprises choose GitLab",{"text":68,"config":69},"Learn more",{"href":70,"dataGaName":71,"dataGaLocation":28},"/why-gitlab/","why gitlab",{"title":73,"items":74},"Get started with",[75,80,85],{"text":76,"config":77},"Platform Engineering",{"href":78,"dataGaName":79,"dataGaLocation":28},"/solutions/platform-engineering/","platform engineering",{"text":81,"config":82},"Developer Experience",{"href":83,"dataGaName":84,"dataGaLocation":28},"/developer-experience/","Developer experience",{"text":86,"config":87},"MLOps",{"href":88,"dataGaName":86,"dataGaLocation":28},"/topics/devops/the-role-of-ai-in-devops/",{"text":90,"left":91,"config":92,"link":94,"lists":98,"footer":167},"Product",true,{"dataNavLevelOne":93},"solutions",{"text":95,"config":96},"View all Solutions",{"href":97,"dataGaName":93,"dataGaLocation":28},"/solutions/",[99,124,146],{"title":100,"description":101,"link":102,"items":107},"Automation","CI/CD and automation to accelerate deployment",{"config":103},{"icon":104,"href":105,"dataGaName":106,"dataGaLocation":28},"AutomatedCodeAlt","/solutions/delivery-automation/","automated software delivery",[108,112,116,120],{"text":109,"config":110},"CI/CD",{"href":111,"dataGaLocation":28,"dataGaName":109},"/solutions/continuous-integration/",{"text":113,"config":114},"AI-Assisted Development",{"href":62,"dataGaLocation":28,"dataGaName":115},"AI assisted development",{"text":117,"config":118},"Source Code Management",{"href":119,"dataGaLocation":28,"dataGaName":117},"/solutions/source-code-management/",{"text":121,"config":122},"Automated Software 
Delivery",{"href":105,"dataGaLocation":28,"dataGaName":123},"Automated software delivery",{"title":125,"description":126,"link":127,"items":132},"Security","Deliver code faster without compromising security",{"config":128},{"href":129,"dataGaName":130,"dataGaLocation":28,"icon":131},"/solutions/security-compliance/","security and compliance","ShieldCheckLight",[133,136,141],{"text":134,"config":135},"Security & Compliance",{"href":129,"dataGaLocation":28,"dataGaName":134},{"text":137,"config":138},"Software Supply Chain Security",{"href":139,"dataGaLocation":28,"dataGaName":140},"/solutions/supply-chain/","Software supply chain security",{"text":142,"config":143},"Compliance & Governance",{"href":144,"dataGaLocation":28,"dataGaName":145},"/solutions/continuous-software-compliance/","Compliance and governance",{"title":147,"link":148,"items":153},"Measurement",{"config":149},{"icon":150,"href":151,"dataGaName":152,"dataGaLocation":28},"DigitalTransformation","/solutions/visibility-measurement/","visibility and measurement",[154,158,162],{"text":155,"config":156},"Visibility & Measurement",{"href":151,"dataGaLocation":28,"dataGaName":157},"Visibility and Measurement",{"text":159,"config":160},"Value Stream Management",{"href":161,"dataGaLocation":28,"dataGaName":159},"/solutions/value-stream-management/",{"text":163,"config":164},"Analytics & Insights",{"href":165,"dataGaLocation":28,"dataGaName":166},"/solutions/analytics-and-insights/","Analytics and insights",{"title":168,"items":169},"GitLab for",[170,175,180],{"text":171,"config":172},"Enterprise",{"href":173,"dataGaLocation":28,"dataGaName":174},"/enterprise/","enterprise",{"text":176,"config":177},"Small Business",{"href":178,"dataGaLocation":28,"dataGaName":179},"/small-business/","small business",{"text":181,"config":182},"Public Sector",{"href":183,"dataGaLocation":28,"dataGaName":184},"/solutions/public-sector/","public 
sector",{"text":186,"config":187},"Pricing",{"href":188,"dataGaName":189,"dataGaLocation":28,"dataNavLevelOne":189},"/pricing/","pricing",{"text":191,"config":192,"link":194,"lists":198,"feature":283},"Resources",{"dataNavLevelOne":193},"resources",{"text":195,"config":196},"View all resources",{"href":197,"dataGaName":193,"dataGaLocation":28},"/resources/",[199,232,255],{"title":200,"items":201},"Getting started",[202,207,212,217,222,227],{"text":203,"config":204},"Install",{"href":205,"dataGaName":206,"dataGaLocation":28},"/install/","install",{"text":208,"config":209},"Quick start guides",{"href":210,"dataGaName":211,"dataGaLocation":28},"/get-started/","quick setup checklists",{"text":213,"config":214},"Learn",{"href":215,"dataGaLocation":28,"dataGaName":216},"https://university.gitlab.com/","learn",{"text":218,"config":219},"Product documentation",{"href":220,"dataGaName":221,"dataGaLocation":28},"https://docs.gitlab.com/","product documentation",{"text":223,"config":224},"Best practice videos",{"href":225,"dataGaName":226,"dataGaLocation":28},"/getting-started-videos/","best practice videos",{"text":228,"config":229},"Integrations",{"href":230,"dataGaName":231,"dataGaLocation":28},"/integrations/","integrations",{"title":233,"items":234},"Discover",[235,240,245,250],{"text":236,"config":237},"Customer success stories",{"href":238,"dataGaName":239,"dataGaLocation":28},"/customers/","customer success stories",{"text":241,"config":242},"Blog",{"href":243,"dataGaName":244,"dataGaLocation":28},"/blog/","blog",{"text":246,"config":247},"Remote",{"href":248,"dataGaName":249,"dataGaLocation":28},"https://handbook.gitlab.com/handbook/company/culture/all-remote/","remote",{"text":251,"config":252},"TeamOps",{"href":253,"dataGaName":254,"dataGaLocation":28},"/teamops/","teamops",{"title":256,"items":257},"Connect",[258,263,268,273,278],{"text":259,"config":260},"GitLab 
Services",{"href":261,"dataGaName":262,"dataGaLocation":28},"/services/","services",{"text":264,"config":265},"Community",{"href":266,"dataGaName":267,"dataGaLocation":28},"/community/","community",{"text":269,"config":270},"Forum",{"href":271,"dataGaName":272,"dataGaLocation":28},"https://forum.gitlab.com/","forum",{"text":274,"config":275},"Events",{"href":276,"dataGaName":277,"dataGaLocation":28},"/events/","events",{"text":279,"config":280},"Partners",{"href":281,"dataGaName":282,"dataGaLocation":28},"/partners/","partners",{"backgroundColor":284,"textColor":285,"text":286,"image":287,"link":291},"#2f2a6b","#fff","Insights for the future of software development",{"altText":288,"config":289},"the source promo card",{"src":290},"/images/navigation/the-source-promo-card.svg",{"text":292,"config":293},"Read the latest",{"href":294,"dataGaName":295,"dataGaLocation":28},"/the-source/","the source",{"text":297,"config":298,"lists":300},"Company",{"dataNavLevelOne":299},"company",[301],{"items":302},[303,308,314,316,321,326,331,336,341,346,351],{"text":304,"config":305},"About",{"href":306,"dataGaName":307,"dataGaLocation":28},"/company/","about",{"text":309,"config":310,"footerGa":313},"Jobs",{"href":311,"dataGaName":312,"dataGaLocation":28},"/jobs/","jobs",{"dataGaName":312},{"text":274,"config":315},{"href":276,"dataGaName":277,"dataGaLocation":28},{"text":317,"config":318},"Leadership",{"href":319,"dataGaName":320,"dataGaLocation":28},"/company/team/e-group/","leadership",{"text":322,"config":323},"Team",{"href":324,"dataGaName":325,"dataGaLocation":28},"/company/team/","team",{"text":327,"config":328},"Handbook",{"href":329,"dataGaName":330,"dataGaLocation":28},"https://handbook.gitlab.com/","handbook",{"text":332,"config":333},"Investor relations",{"href":334,"dataGaName":335,"dataGaLocation":28},"https://ir.gitlab.com/","investor relations",{"text":337,"config":338},"Trust Center",{"href":339,"dataGaName":340,"dataGaLocation":28},"/security/","trust 
center",{"text":342,"config":343},"AI Transparency Center",{"href":344,"dataGaName":345,"dataGaLocation":28},"/ai-transparency-center/","ai transparency center",{"text":347,"config":348},"Newsletter",{"href":349,"dataGaName":350,"dataGaLocation":28},"/company/contact/","newsletter",{"text":352,"config":353},"Press",{"href":354,"dataGaName":355,"dataGaLocation":28},"/press/","press",{"text":357,"config":358,"lists":359},"Contact us",{"dataNavLevelOne":299},[360],{"items":361},[362,365,370],{"text":35,"config":363},{"href":37,"dataGaName":364,"dataGaLocation":28},"talk to sales",{"text":366,"config":367},"Get help",{"href":368,"dataGaName":369,"dataGaLocation":28},"/support/","get help",{"text":371,"config":372},"Customer portal",{"href":373,"dataGaName":374,"dataGaLocation":28},"https://customers.gitlab.com/customers/sign_in/","customer portal",{"close":376,"login":377,"suggestions":384},"Close",{"text":378,"link":379},"To search repositories and projects, login to",{"text":380,"config":381},"gitlab.com",{"href":42,"dataGaName":382,"dataGaLocation":383},"search login","search",{"text":385,"default":386},"Suggestions",[387,389,393,395,399,403],{"text":57,"config":388},{"href":62,"dataGaName":57,"dataGaLocation":383},{"text":390,"config":391},"Code Suggestions (AI)",{"href":392,"dataGaName":390,"dataGaLocation":383},"/solutions/code-suggestions/",{"text":109,"config":394},{"href":111,"dataGaName":109,"dataGaLocation":383},{"text":396,"config":397},"GitLab on AWS",{"href":398,"dataGaName":396,"dataGaLocation":383},"/partners/technology-partners/aws/",{"text":400,"config":401},"GitLab on Google Cloud",{"href":402,"dataGaName":400,"dataGaLocation":383},"/partners/technology-partners/google-cloud-platform/",{"text":404,"config":405},"Why GitLab?",{"href":70,"dataGaName":404,"dataGaLocation":383},{"freeTrial":407,"mobileIcon":412,"desktopIcon":417},{"text":408,"config":409},"Start free 
trial",{"href":410,"dataGaName":33,"dataGaLocation":411},"https://gitlab.com/-/trials/new/","nav",{"altText":413,"config":414},"Gitlab Icon",{"src":415,"dataGaName":416,"dataGaLocation":411},"/images/brand/gitlab-logo-tanuki.svg","gitlab icon",{"altText":413,"config":418},{"src":419,"dataGaName":416,"dataGaLocation":411},"/images/brand/gitlab-logo-type.svg",{"freeTrial":421,"mobileIcon":425,"desktopIcon":427},{"text":422,"config":423},"Learn more about GitLab Duo",{"href":62,"dataGaName":424,"dataGaLocation":411},"gitlab duo",{"altText":413,"config":426},{"src":415,"dataGaName":416,"dataGaLocation":411},{"altText":413,"config":428},{"src":419,"dataGaName":416,"dataGaLocation":411},"content:shared:en-us:main-navigation.yml","Main Navigation","shared/en-us/main-navigation.yml","shared/en-us/main-navigation",{"_path":434,"_dir":5,"_draft":6,"_partial":6,"_locale":7,"title":435,"titleMobile":435,"button":436,"config":441,"_id":443,"_type":16,"_source":17,"_file":444,"_stem":445,"_extension":20},"/shared/en-us/banner","GitLab 18 & the next step in intelligent DevSecOps. 
Join us June 24.",{"text":437,"config":438},"Register now",{"href":439,"dataGaName":440,"dataGaLocation":28},"/eighteen/","gitlab 18 banner",{"layout":442},"release","content:shared:en-us:banner.yml","shared/en-us/banner.yml","shared/en-us/banner",{"_path":447,"_dir":5,"_draft":6,"_partial":6,"_locale":7,"data":448,"_id":654,"_type":16,"title":655,"_source":17,"_file":656,"_stem":657,"_extension":20},"/shared/en-us/main-footer",{"text":449,"source":450,"edit":456,"contribute":461,"config":466,"items":471,"minimal":646},"Git is a trademark of Software Freedom Conservancy and our use of 'GitLab' is under license",{"text":451,"config":452},"View page source",{"href":453,"dataGaName":454,"dataGaLocation":455},"https://gitlab.com/gitlab-com/marketing/digital-experience/about-gitlab-com/","page source","footer",{"text":457,"config":458},"Edit this page",{"href":459,"dataGaName":460,"dataGaLocation":455},"https://gitlab.com/gitlab-com/marketing/digital-experience/about-gitlab-com/-/blob/main/content/","web ide",{"text":462,"config":463},"Please contribute",{"href":464,"dataGaName":465,"dataGaLocation":455},"https://gitlab.com/gitlab-com/marketing/digital-experience/about-gitlab-com/-/blob/main/CONTRIBUTING.md/","please contribute",{"twitter":467,"facebook":468,"youtube":469,"linkedin":470},"https://twitter.com/gitlab","https://www.facebook.com/gitlab","https://www.youtube.com/channel/UCnMGQ8QHMAnVIsI3xJrihhg","https://www.linkedin.com/company/gitlab-com",[472,495,552,581,616],{"title":46,"links":473,"subMenu":478},[474],{"text":475,"config":476},"DevSecOps platform",{"href":55,"dataGaName":477,"dataGaLocation":455},"devsecops platform",[479],{"title":186,"links":480},[481,485,490],{"text":482,"config":483},"View plans",{"href":188,"dataGaName":484,"dataGaLocation":455},"view plans",{"text":486,"config":487},"Why Premium?",{"href":488,"dataGaName":489,"dataGaLocation":455},"/pricing/premium/","why premium",{"text":491,"config":492},"Why 
Ultimate?",{"href":493,"dataGaName":494,"dataGaLocation":455},"/pricing/ultimate/","why ultimate",{"title":496,"links":497},"Solutions",[498,503,506,508,513,518,522,525,529,534,536,539,542,547],{"text":499,"config":500},"Digital transformation",{"href":501,"dataGaName":502,"dataGaLocation":455},"/solutions/digital-transformation/","digital transformation",{"text":134,"config":504},{"href":129,"dataGaName":505,"dataGaLocation":455},"security & compliance",{"text":123,"config":507},{"href":105,"dataGaName":106,"dataGaLocation":455},{"text":509,"config":510},"Agile development",{"href":511,"dataGaName":512,"dataGaLocation":455},"/solutions/agile-delivery/","agile delivery",{"text":514,"config":515},"Cloud transformation",{"href":516,"dataGaName":517,"dataGaLocation":455},"/solutions/cloud-native/","cloud transformation",{"text":519,"config":520},"SCM",{"href":119,"dataGaName":521,"dataGaLocation":455},"source code management",{"text":109,"config":523},{"href":111,"dataGaName":524,"dataGaLocation":455},"continuous integration & delivery",{"text":526,"config":527},"Value stream management",{"href":161,"dataGaName":528,"dataGaLocation":455},"value stream management",{"text":530,"config":531},"GitOps",{"href":532,"dataGaName":533,"dataGaLocation":455},"/solutions/gitops/","gitops",{"text":171,"config":535},{"href":173,"dataGaName":174,"dataGaLocation":455},{"text":537,"config":538},"Small business",{"href":178,"dataGaName":179,"dataGaLocation":455},{"text":540,"config":541},"Public sector",{"href":183,"dataGaName":184,"dataGaLocation":455},{"text":543,"config":544},"Education",{"href":545,"dataGaName":546,"dataGaLocation":455},"/solutions/education/","education",{"text":548,"config":549},"Financial services",{"href":550,"dataGaName":551,"dataGaLocation":455},"/solutions/finance/","financial 
services",{"title":191,"links":553},[554,556,558,560,563,565,567,569,571,573,575,577,579],{"text":203,"config":555},{"href":205,"dataGaName":206,"dataGaLocation":455},{"text":208,"config":557},{"href":210,"dataGaName":211,"dataGaLocation":455},{"text":213,"config":559},{"href":215,"dataGaName":216,"dataGaLocation":455},{"text":218,"config":561},{"href":220,"dataGaName":562,"dataGaLocation":455},"docs",{"text":241,"config":564},{"href":243,"dataGaName":244,"dataGaLocation":455},{"text":236,"config":566},{"href":238,"dataGaName":239,"dataGaLocation":455},{"text":246,"config":568},{"href":248,"dataGaName":249,"dataGaLocation":455},{"text":259,"config":570},{"href":261,"dataGaName":262,"dataGaLocation":455},{"text":251,"config":572},{"href":253,"dataGaName":254,"dataGaLocation":455},{"text":264,"config":574},{"href":266,"dataGaName":267,"dataGaLocation":455},{"text":269,"config":576},{"href":271,"dataGaName":272,"dataGaLocation":455},{"text":274,"config":578},{"href":276,"dataGaName":277,"dataGaLocation":455},{"text":279,"config":580},{"href":281,"dataGaName":282,"dataGaLocation":455},{"title":297,"links":582},[583,585,587,589,591,593,595,600,605,607,609,611],{"text":304,"config":584},{"href":306,"dataGaName":299,"dataGaLocation":455},{"text":309,"config":586},{"href":311,"dataGaName":312,"dataGaLocation":455},{"text":317,"config":588},{"href":319,"dataGaName":320,"dataGaLocation":455},{"text":322,"config":590},{"href":324,"dataGaName":325,"dataGaLocation":455},{"text":327,"config":592},{"href":329,"dataGaName":330,"dataGaLocation":455},{"text":332,"config":594},{"href":334,"dataGaName":335,"dataGaLocation":455},{"text":596,"config":597},"Environmental, social and governance (ESG)",{"href":598,"dataGaName":599,"dataGaLocation":455},"/environmental-social-governance/","environmental, social and governance",{"text":601,"config":602},"Diversity, inclusion and belonging (DIB)",{"href":603,"dataGaName":604,"dataGaLocation":455},"/diversity-inclusion-belonging/","Diversity, 
inclusion and belonging",{"text":337,"config":606},{"href":339,"dataGaName":340,"dataGaLocation":455},{"text":347,"config":608},{"href":349,"dataGaName":350,"dataGaLocation":455},{"text":352,"config":610},{"href":354,"dataGaName":355,"dataGaLocation":455},{"text":612,"config":613},"Modern Slavery Transparency Statement",{"href":614,"dataGaName":615,"dataGaLocation":455},"https://handbook.gitlab.com/handbook/legal/modern-slavery-act-transparency-statement/","modern slavery transparency statement",{"title":617,"links":618},"Contact Us",[619,622,624,626,631,636,641],{"text":620,"config":621},"Contact an expert",{"href":37,"dataGaName":38,"dataGaLocation":455},{"text":366,"config":623},{"href":368,"dataGaName":369,"dataGaLocation":455},{"text":371,"config":625},{"href":373,"dataGaName":374,"dataGaLocation":455},{"text":627,"config":628},"Status",{"href":629,"dataGaName":630,"dataGaLocation":455},"https://status.gitlab.com/","status",{"text":632,"config":633},"Terms of use",{"href":634,"dataGaName":635,"dataGaLocation":455},"/terms/","terms of use",{"text":637,"config":638},"Privacy statement",{"href":639,"dataGaName":640,"dataGaLocation":455},"/privacy/","privacy statement",{"text":642,"config":643},"Cookie preferences",{"dataGaName":644,"dataGaLocation":455,"id":645,"isOneTrustButton":91},"cookie preferences","ot-sdk-btn",{"items":647},[648,650,652],{"text":632,"config":649},{"href":634,"dataGaName":635,"dataGaLocation":455},{"text":637,"config":651},{"href":639,"dataGaName":640,"dataGaLocation":455},{"text":642,"config":653},{"dataGaName":644,"dataGaLocation":455,"id":645,"isOneTrustButton":91},"content:shared:en-us:main-footer.yml","Main 
Footer","shared/en-us/main-footer.yml","shared/en-us/main-footer",[659,672,684,695,706,718,729,740,751,761],{"_path":660,"_dir":661,"_draft":6,"_partial":6,"_locale":7,"seo":662,"content":665,"config":666,"_id":669,"_type":16,"title":663,"_source":17,"_file":670,"_stem":671,"_extension":20},"/en-us/blog/categories/agile-planning","categories",{"title":663,"description":664},"Agile Planning","Browse articles related to Agile Planning on the GitLab Blog",{"name":663},{"template":667,"slug":668,"hide":6},"BlogCategory","agile-planning","content:en-us:blog:categories:agile-planning.yml","en-us/blog/categories/agile-planning.yml","en-us/blog/categories/agile-planning",{"_path":673,"_dir":661,"_draft":6,"_partial":6,"_locale":7,"seo":674,"content":677,"config":678,"_id":680,"_type":16,"title":681,"_source":17,"_file":682,"_stem":683,"_extension":20},"/en-us/blog/categories/ai-ml",{"title":675,"description":676},"AI/ML","Browse articles related to AI/ML on the GitLab Blog",{"name":675},{"template":667,"slug":679,"hide":6},"ai-ml","content:en-us:blog:categories:ai-ml.yml","Ai Ml","en-us/blog/categories/ai-ml.yml","en-us/blog/categories/ai-ml",{"_path":685,"_dir":661,"_draft":6,"_partial":6,"_locale":7,"seo":686,"content":689,"config":690,"_id":692,"_type":16,"title":687,"_source":17,"_file":693,"_stem":694,"_extension":20},"/en-us/blog/categories/bulletin-board",{"title":687,"description":688},"Bulletin Board","Browse articles related to Bulletin Board on the GitLab Blog",{"name":687},{"template":667,"slug":691,"hide":6},"bulletin-board","content:en-us:blog:categories:bulletin-board.yml","en-us/blog/categories/bulletin-board.yml","en-us/blog/categories/bulletin-board",{"_path":696,"_dir":661,"_draft":6,"_partial":6,"_locale":7,"seo":697,"content":700,"config":701,"_id":703,"_type":16,"title":698,"_source":17,"_file":704,"_stem":705,"_extension":20},"/en-us/blog/categories/customer-stories",{"title":698,"description":699},"Customer Stories","Browse articles related to 
Customer Stories on the GitLab Blog",{"name":698},{"template":667,"slug":702,"hide":6},"customer-stories","content:en-us:blog:categories:customer-stories.yml","en-us/blog/categories/customer-stories.yml","en-us/blog/categories/customer-stories",{"_path":707,"_dir":661,"_draft":6,"_partial":6,"_locale":7,"seo":708,"content":711,"config":712,"_id":714,"_type":16,"title":715,"_source":17,"_file":716,"_stem":717,"_extension":20},"/en-us/blog/categories/devsecops",{"title":709,"description":710},"DevSecOps","Browse articles related to DevSecOps on the GitLab Blog",{"name":709},{"template":667,"slug":713,"hide":6},"devsecops","content:en-us:blog:categories:devsecops.yml","Devsecops","en-us/blog/categories/devsecops.yml","en-us/blog/categories/devsecops",{"_path":719,"_dir":661,"_draft":6,"_partial":6,"_locale":7,"seo":720,"content":723,"config":724,"_id":726,"_type":16,"title":721,"_source":17,"_file":727,"_stem":728,"_extension":20},"/en-us/blog/categories/engineering",{"title":721,"description":722},"Engineering","Browse articles related to Engineering on the GitLab Blog",{"name":721},{"template":667,"slug":725,"hide":6},"engineering","content:en-us:blog:categories:engineering.yml","en-us/blog/categories/engineering.yml","en-us/blog/categories/engineering",{"_path":730,"_dir":661,"_draft":6,"_partial":6,"_locale":7,"seo":731,"content":734,"config":735,"_id":737,"_type":16,"title":732,"_source":17,"_file":738,"_stem":739,"_extension":20},"/en-us/blog/categories/news",{"title":732,"description":733},"News","Browse articles related to News on the GitLab Blog",{"name":732},{"template":667,"slug":736,"hide":6},"news","content:en-us:blog:categories:news.yml","en-us/blog/categories/news.yml","en-us/blog/categories/news",{"_path":741,"_dir":661,"_draft":6,"_partial":6,"_locale":7,"seo":742,"content":745,"config":746,"_id":748,"_type":16,"title":743,"_source":17,"_file":749,"_stem":750,"_extension":20},"/en-us/blog/categories/open-source",{"title":743,"description":744},"Open 
Source","Browse articles related to Open Source on the GitLab Blog",{"name":743},{"template":667,"slug":747,"hide":6},"open-source","content:en-us:blog:categories:open-source.yml","en-us/blog/categories/open-source.yml","en-us/blog/categories/open-source",{"_path":752,"_dir":661,"_draft":6,"_partial":6,"_locale":7,"seo":753,"content":755,"config":756,"_id":758,"_type":16,"title":90,"_source":17,"_file":759,"_stem":760,"_extension":20},"/en-us/blog/categories/product",{"title":90,"description":754},"Browse articles related to Product on the GitLab Blog",{"name":90},{"template":667,"slug":757,"hide":6},"product","content:en-us:blog:categories:product.yml","en-us/blog/categories/product.yml","en-us/blog/categories/product",{"_path":762,"_dir":661,"_draft":6,"_partial":6,"_locale":7,"seo":763,"content":765,"config":766,"_id":768,"_type":16,"title":125,"_source":17,"_file":769,"_stem":770,"_extension":20},"/en-us/blog/categories/security",{"title":125,"description":764},"Browse articles related to Security on the GitLab Blog",{"name":125},{"template":667,"slug":767,"hide":6},"security","content:en-us:blog:categories:security.yml","en-us/blog/categories/security.yml","en-us/blog/categories/security",[772,813,853,890,931,971,1010,1049,1086,1126],{"category":663,"slug":668,"posts":773},[774,789,801],{"content":775,"config":786},{"title":776,"description":777,"authors":778,"heroImage":780,"date":781,"body":782,"category":668,"tags":783},"Introducing GitLab’s new Planner role for Agile planning teams","Learn how GitLab’s new Planner role helps Agile teams manage planning workflows, with tailored access across SaaS, Dedicated, and Self-managed solutions.",[779],"Amanda Rueda","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662488/Blog/Hero%20Images/blog-image-template-1800x945__3_.png","2024-11-25","GitLab launched a new role within the DevSecOps platform – the Planner. 
Built to align with GitLab’s strategy of providing flexible, role-based access controls, as demonstrated by the release of [custom roles](https://docs.gitlab.com/ee/user/custom_roles.html), the Planner role gives software development teams and planning-focused users access to the tools they need to manage Agile workflows without over-provisioning permissions that could introduce unnecessary risks. By tailoring access to meet specific user needs, the Planner role ensures teams can stay productive while maintaining security and compliance, adhering to the [principle of least privilege](https://about.gitlab.com/blog/the-ultimate-guide-to-least-privilege-access-with-gitlab/).\n\n## Why we created the Planner role\n\nOur journey to this new role started with feedback from our customers and internal teams. We consistently heard that while GitLab offers comprehensive tools for planning and managing Agile development cycles, there was a need for more specific role-based access controls. Product managers, project leads, and other planning roles often required access to planning features but didn’t need full development permissions. In fact, giving them broader access is undesirable, as it increases security risks and potential for errors, such as making unintended changes to code or sensitive configurations. We listened.\n\nThrough user interviews, competitive analysis, and extensive research, we validated the need for a role that grants full access to planning tools while maintaining security by restricting access to developer-centric features.\n\n## What does the Planner role offer?\n\nThe Planner role is a hybrid of the existing [Guest and Reporter roles](https://docs.gitlab.com/ee/user/permissions.html#roles) but designed specifically for those who need access to planning workflows. 
\n\nHere’s what you can expect:\n\n* Access to key planning tools like epics, roadmaps, issue boards, and [OKRs](https://docs.gitlab.com/ee/user/okrs.html) (*some features may require a GitLab Premium or Ultimate license*)  \n* Enhanced security by limiting unnecessary access to sensitive development features  \n* The Planner role can be used in conjunction with the Enterprise Agile Planning add-on, providing teams with tailored access to planning tools while maintaining security and control.  (*however, the Planner role itself is available on all license tiers*).\n\nThe Planner role is available across all GitLab solutions, including SaaS, GitLab Dedicated, and Self-managed, ensuring that all customers can benefit from this tailored access.\n\nThis role gives teams the flexibility to align permissions with job functions, creating a balance between accessibility and security.\n\n## How the Planner role supports Agile practices\n\nIn [Agile software development](https://about.gitlab.com/blog/categories/agile-planning/), ensuring that each team member has the right tools and permissions to perform their role is crucial for workflow efficiency. The Planner role supports this by allowing planning team members to fully participate in the planning stages of the software development lifecycle without the risk of overstepping into areas like development or deployment.\n\nFrom creating and managing epics to defining roadmaps, the Planner role gives Agile teams the tools they need to stay aligned and productive.\n\n## Customer-centric design\n\nWe didn’t create this role in isolation. We involved our community in the process every step of the way. 
Through surveys, interviews, and testing, we fine-tuned the permissions to make sure they fit the real-world needs of product and project managers.\n\nThe role also aligns with GitLab’s long-standing mission to be a platform for enterprise Agile teams, giving businesses the flexibility and control to implement Agile methodologies at scale.\n\n## Community feedback and engagement \n\nWe value your input and encourage you to share your experiences with the new Planner role. Your feedback is essential to help refine and improve your GitLab experience. Please visit our [feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/503817) to provide your thoughts and suggestions.\n\n## Start planning with GitLab today!\n\nThe Planner role is just one of the many ways GitLab empowers software development teams to plan, collaborate, and deliver efficiently. Whether you’re looking to streamline your product management workflows, improve team collaboration, or align your Agile practices, GitLab has the tools to help you succeed.\n\n> Ready to experience the full power of GitLab? 
[Sign up for a free 60-day GitLab Ultimate trial](https://about.gitlab.com/free-trial/) and start planning your next project with the Planner role, tailored to fit your team's unique needs.\n\n## Read more\n- [Beyond Devs: GitLab Enterprise Agile Planning add-on for all roles](https://about.gitlab.com/blog/gitlab-enterprise-agile-planning-add-on-for-all-roles/)\n- [How to use GitLab for Agile software development](https://about.gitlab.com/blog/gitlab-for-agile-software-development/)\n- [First look: The new Agile planning experience in GitLab](https://about.gitlab.com/blog/first-look-the-new-agile-planning-experience-in-gitlab/)",[784,475,785,757],"agile","features",{"slug":787,"featured":91,"template":788},"introducing-gitlabs-new-planner-role-for-agile-planning-teams","BlogPost",{"content":790,"config":799},{"title":791,"description":792,"authors":793,"heroImage":795,"date":796,"body":797,"category":668,"tags":798},"Seamlessly migrate from Jira to GitLab with Jira2Lab at scale","Discover how Jira2GitLab simplifies large-scale Jira-to-GitLab migrations by handling complex data transfers, improving scalability, and ensuring efficient integration.",[794],"Maximilien Belinga","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749663129/Blog/Hero%20Images/blog-image-template-1800x945__28_.png","2024-10-10","[Atlassian Server reached end of life in February](https://about.gitlab.com/move-to-gitlab-from-atlassian/), prompting many customers to explore alternatives like Atlassian Cloud or Data Center. However, enterprises using Atlassian Server are increasingly seeking Agile planning solutions that offer more flexibility, cost-efficiency, and robust DevSecOps integration. They also need to tackle challenges related to data volume, customization, user mapping, performance, and data integrity during migration. 
This is where [GitLab’s Jira2Lab](https://gitlab.com/gitlab-org/professional-services-automation/tools/migration/jira2lab) comes into play, offering a seamless solution for large-scale Jira migrations to GitLab, while providing full CI/CD integration.\n\n## The problem with large-scale Jira migrations\n\nMigrating from Jira to GitLab can be a significant hurdle, especially for enterprises with complex workflows and thousands of issues to move. Here are the most common challenges faced during such migrations:\n\n- **Massive data migration:** As the number of issues, attachments, comments, and projects increases, so does the complexity of migrating them without performance issues or data loss.\n\n- **Custom fields and workflows:** Jira instances often contain custom workflows, fields, and issue types that do not have a one-to-one mapping in GitLab. This gap creates friction during migration, as existing tools often require manual intervention to translate these elements.\n\n- **Lack of full DevSecOps integration:** While many migration tools handle project management data, they do not integrate GitLab’s full DevSecOps capabilities. As a result, teams are left to manually configure their [CI/CD](https://about.gitlab.com/topics/ci-cd/) pipelines and source control management systems after the migration.\n\n## Introducing Jira2Lab\n\nJira2Lab was designed from the ground up to solve the specific challenges of migrating from Jira to GitLab at scale. It’s not just about moving data; it’s about enabling teams to seamlessly transition into GitLab’s powerful DevSecOps environment without downtime or data loss.\n\n### Key features of Jira2Lab\n\n1. Efficient data handling at scale\u003Cbr> \nJira2Lab is optimized to handle thousands of issues, attachments, comments, and custom fields across multiple projects without sacrificing performance. It scales effortlessly to accommodate even the largest enterprise migrations.\n\n2. 
Custom workflow and field mapping\u003Cbr>\nOne of the standout features of Jira2Lab is its ability to automatically map custom workflows and fields from Jira to GitLab. The tool provides a flexible mapping configuration that eliminates the need for manual intervention during the migration process, making sure everything moves smoothly from Jira to GitLab.\n\n3. CI/CD pipeline integration\u003Cbr>\nJira2Lab doesn’t just migrate your issues and projects — it integrates GitLab’s full CI/CD pipeline into the migration process. This ensures that development teams can start using GitLab’s DevSecOps features, such as automated testing and deployment pipelines, immediately after migration.\n\n4. Pilot migrations\u003Cbr>\nOur tool supports pilot migrations to allow teams to test their configurations and workflows before scaling up. This ensures that any issues can be caught early, preventing disruptions during the full migration.\n\n5. Real-time monitoring\u003Cbr>\nThe tool provides real-time monitoring and logs during migration, giving complete transparency to ensure every step is performed correctly and without errors.\n\n6. Customizable and flexible\u003Cbr>\nEven if your Jira instance has unique configurations or workflows, Jira2Lab offers the flexibility to customize the migration according to your specific requirements, ensuring nothing is lost in translation.\n\n### Feature comparison: Jira vs. GitLab\n\nMigrating from Jira to GitLab helps consolidate workflows and unlock advanced features native to GitLab. 
Here’s a quick comparison of the core features of both platforms:\n\n| **Feature**             | **Jira**                        | **GitLab**                    |\n|-------------------------|----------------------------------|-------------------------------|\n| **Issue Tracking**       | Yes (Highly customizable)       | Yes (Integrated with DevSecOps)   |\n| **Agile Boards**         | Yes (Kanban, Scrum)             | Yes (Issue Boards, Milestones) |\n| **CI/CD**                | No (Requires external tools)    | Yes (Built-in CI/CD)           |\n| **Source Control**       | No (Requires GitHub/Bitbucket)  | Yes (Native Git support)       |\n| **DevSecOps Tools**         | Limited integrations            | Full DevSecOps lifecycle          |\n\nWith Jira2Lab, we ensure that all critical aspects, from issue tracking to CI/CD pipelines, are transitioned smoothly, taking full advantage of GitLab’s integrated approach to development and operations.\n\n## The migration methodology\n\nJira2Lab follows a structured, five-phase migration methodology, ensuring seamless transition with minimal disruption:\n\n### 1. Discovery and planning\n\nWe start by thoroughly understanding the customer’s Jira setup, identifying all necessary custom workflows, fields, and projects that need to be migrated. This phase also involves a gap analysis to compare Jira and GitLab features and map out the migration process.\n\n### 2. Setup\nIn this phase, we configure the migration tool and set up the necessary environments for both Jira and GitLab. This includes verifying all permissions and setting up a backup of Jira data before the migration begins.\n\n### 3. Pilot migrations\nBefore migrating the entire dataset, we run pilot migrations on selected projects to test the migration process, workflows, and data integrity. This allows us to identify and resolve any issues early in the process.\n\n### 4. 
Scaled migrations\nAfter validating the pilot migration, we scale the migration across all projects, ensuring minimal downtime and smooth transitions for development teams.\n\n### 5. Wrap-up and post-migration support\nOnce the migration is complete, we provide ongoing support, ensuring all teams are fully operational in GitLab. This phase also includes user training and the decommissioning of the Jira instance, if required.\n\n## Case study: Tackling scale with Jira2Lab\n\nIn a recent migration, a large enterprise faced the challenge of migrating over 20,000 issues across 50 projects from Jira to GitLab. The project had highly customized workflows and thousands of comments and attachments that needed to be transferred.\n\nWith Jira2Lab, we were able to:\n\n- Migrate all data, including custom fields, without any data loss.\n- Set up CI/CD pipelines within GitLab so that teams could immediately continue their work post-migration.\n- Conduct a pilot migration of two projects, which allowed us to identify and fix minor workflow discrepancies before scaling up to the entire organization.\n\nThe result was a seamless transition to GitLab, with the entire process completed within the planned timeline and no significant downtime.\n\n## Get started with Jira2Lab today\n\nJira2Lab stands out in the market by addressing the limitations that other migration tools cannot handle. It is designed specifically for large-scale migrations and can integrate with GitLab’s full DevSecOps lifecycle, unlike most tools that only handle project management data. The tool’s ability to map custom workflows and integrate CI/CD pipelines makes it the perfect solution for enterprises looking to enhance their development workflows while migrating to GitLab.\n\n> Ready to scale your development processes with GitLab? Explore our [Professional Services catalog](https://about.gitlab.com/services/catalog/) to learn how we can help your team migrate efficiently and effectively. 
Contact us through the form at the end for a personalized demo of GitLab's Jira2Lab.\n",[784,109,709,785,757],{"slug":800,"featured":91,"template":788},"seamlessly-migrate-from-jira-to-gitlab-with-jira2lab-at-scale",{"content":802,"config":811},{"title":803,"description":804,"authors":805,"heroImage":807,"date":808,"body":809,"category":668,"tags":810},"Get to know the GitLab Wiki for effective knowledge management","The GitLab Wiki helps organizations benefit from Agile planning and knowledge management. Learn best practices for using this powerful tool in your DevSecOps environment.",[806],"Matthew Macfarlane","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749660011/Blog/Hero%20Images/blog-image-template-1800x945__21_.png","2024-07-17","Effective knowledge management is a cornerstone of Agile planning success for organizations across all industries. The [GitLab Wiki](https://docs.gitlab.com/ee/user/project/wiki/), part of the GitLab DevSecOps platform, is a powerful tool that supports this endeavor. With the GitLab Wiki, teams can streamline [Agile planning](https://about.gitlab.com/solutions/agile-delivery/) processes, enhance collaboration, and ensure that valuable information is accessible and up to date, all within a single platform. In this article, you will learn how to harness the GitLab Wiki for effective knowledge management.\n\n## What is the GitLab Wiki?\n\nThe GitLab Wiki is available at both the Project and Group levels, and allows teams to create, organize, and share documentation directly within their GitLab instances. It's a flexible and user-friendly wiki that supports multiple formats, including Markdown, RDoc, AsciiDoc, and Org, making it easy to present information in a readable manner.\n\n## Benefits of using GitLab Wiki for knowledge management\n\nThere are numerous benefits to using the GitLab Wiki for knowledge management as part of an overall Agile planning strategy. Here are four examples:\n\n1. 
**Centralized information repository:** The GitLab Wiki serves as a single source of truth where all knowledge can be stored. This centralization ensures that team members can easily find the information they need without having to leave the platform for an external solution.\n2. **Collaboration and accessibility:** Being integrated into GitLab helps to foster collaboration and ensures that everyone has access to the latest information.\n3. **Version control:** The GitLab Wiki is bolstered by GitLab's robust version control system. This means every change is tracked, and previous versions can be restored if necessary. This is crucial for maintaining the integrity of documentation over time.\n4. **Templates:** Templates ensure that the content across different pages maintains a consistent format and style, making the documentation more professional and easier to navigate. Templates also save time by providing a predefined structure that can be reused. This reduces the effort required to create new pages or update existing ones.\n\n## Best practices for knowledge management in the GitLab Wiki\n\nHere are five best practices to follow when using the GitLab Wiki:\n\n1. **Organize content logically:** Structure your wiki with clear, logical pages and sub-pages. Use categories to group related information and ensure that the hierarchy is intuitive. This makes it easier for users to navigate and find what they need.\n2. **Standardize documentation practices:** Establish and enforce guidelines for documentation, including consistent formatting, naming conventions, and content structure. Templates, as mentioned before, can help with this.\n3. **Perform regular updates and reviews:** Unlike an issue or epic, wiki pages never close. To ensure the accuracy of wiki pages it's important to schedule regular reviews and updates. Encourage team members to contribute updates as they encounter changes in their work.\n4. 
**Encourage collaboration:** Foster a culture where team members are encouraged to contribute to the Wiki. This could be through regular knowledge-sharing sessions, incentives for contributions, or integrating documentation updates into daily workflows.\n5. **Link wiki pages to issues and epics:** Use GitLab’s auto-complete capability to link wiki pages with issues and epics. Linking can help your team better locate and reference information throughout a project's lifecycle.\n\n## Get started with the GitLab Wiki\n\nThe GitLab Wiki helps organizations follow Agile planning principles and attain effective knowledge management by ensuring documentation remains a valuable resource. Leveraging GitLab Wiki for knowledge management not only enhances productivity but also fosters a culture of continuous learning and improvement.\n\nIncorporate these strategies into your GitLab Wiki practice and watch as your team's efficiency and collaboration reach new heights. To learn more about the GitLab Wiki, check out [our documentation wiki](https://docs.gitlab.com/ee/user/project/wiki/).\n\n> Try the GitLab Wiki for yourself with a [free 30-day trial of GitLab Ultimate](https://gitlab.com/-/trial_registrations/new?glm_source=about.gitlab.com/blog&glm_content=default-saas-trial).",[784,475,757],{"slug":812,"featured":6,"template":788},"get-to-know-the-gitlab-wiki-for-effective-knowledge-management",{"category":675,"slug":679,"posts":814},[815,830,841],{"content":816,"config":828},{"title":817,"description":818,"authors":819,"heroImage":821,"date":822,"body":823,"category":679,"tags":824},"Speed up code reviews: Let AI handle the feedback implementation","Discover how GitLab Duo with Amazon Q automates the implementation of code review feedback through AI, transforming a time-consuming manual process into a streamlined workflow.",[820],"Cesar 
Saavedra","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659604/Blog/Hero%20Images/Screenshot_2024-11-27_at_4.55.28_PM.png","2025-06-10","You know that feeling when you've just submitted a merge request and the code review comments start rolling in? One reviewer wants the labels updated, another asks for side-by-side layouts, someone else requests bold formatting, and don't forget about that button color change. Before you know it, you're spending hours implementing feedback that, while important, takes you away from building new features. It's a time-consuming process that every developer faces, yet it feels like there should be a better way.\n\nWhat if you could have an AI assistant that understands code review feedback and automatically implements the changes for you? That's exactly what [GitLab Duo with Amazon Q](https://about.gitlab.com/blog/gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws/) brings to your development workflow. This seamless integration combines GitLab's comprehensive DevSecOps platform with Amazon Q's advanced AI capabilities, creating an intelligent assistant that can read reviewer comments and convert them directly into code changes. Instead of manually addressing each piece of feedback, you can let AI handle the implementation while you focus on the bigger picture.\n\n## How GitLab Duo with Amazon Q works\n\nWhen you're viewing a merge request with reviewer comments, you'll see feedback scattered throughout your code. Let's take the examples from earlier in this article: maybe you've received a request to update a form label here, a suggestion to display fields side-by-side there, or a note about making certain text bold. 
Each comment represents a task that normally you'd need to handle manually.\n\n![feedback on an MR](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749673634/Blog/Content%20Images/1-show-comment.png)\n\nWith GitLab Duo with Amazon Q, you can simply enter the `/q dev` quick action in a comment. This prompts Amazon Q to analyze all the feedback and start modifying your code automatically. The AI agent understands the context of each comment and implements the requested changes directly in your codebase.\n\n![/q dev function prompting Amazon Q to analyze feedback](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749673634/Blog/Content%20Images/2-invoke-q-dev.png)\n\nOnce Amazon Q processes the feedback, you can view all the updates in the \"Changes\" tab of your merge request. Every modification is clearly visible, so you can verify that the AI agent correctly interpreted and implemented each piece of feedback. You can then run your updated application to confirm that all the changes work as expected — that form label is updated, the fields are displayed side-by-side, the text is bold, and yes, that button is now blue.\n\nWatch the code review feedback process in action:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/31E9X9BrK5s?si=ThFywR34V3Bfj1Z-\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\nProcessing code review feedback is a necessary but time-intensive part of software development.  GitLab Duo with Amazon Q evolves this manual process into an automated workflow, dramatically reducing the time between receiving feedback and implementing changes. 
By letting AI handle these routine modifications, you're free to focus on what really matters — building innovative features and solving complex problems.\n\nWith GitLab Duo with Amazon Q, you can:\n- Eliminate hours of manual feedback implementation\n- Accelerate your code review cycles\n- Maintain consistency in how feedback is addressed\n- Reduce context switching between reviewing comments and writing code\n- Ship features faster with streamlined deployment times\n\n> #### To learn more about GitLab Duo with Amazon Q visit us at an upcoming [AWS Summit in a city near you](https://about.gitlab.com/events/aws-summits/) or [reach out to your GitLab representative](https://about.gitlab.com/partners/technology-partners/aws/#form).\n\n## GitLab Duo with Amazon Q resources\n\n- [GitLab Duo with Amazon Q: Agentic AI optimized for AWS generally available](https://about.gitlab.com/blog/gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws/)\n- [GitLab and AWS partner page](https://about.gitlab.com/partners/technology-partners/aws/)\n- [GitLab Duo with Amazon Q documentation](https://docs.gitlab.com/user/duo_amazon_q/)\n- [What is agentic AI?](https://about.gitlab.com/topics/agentic-ai/)\n- [Agentic AI guides and resources](https://about.gitlab.com/blog/agentic-ai-guides-and-resources/)",[757,785,475,825,826,827],"AWS","code review","tutorial",{"slug":829,"featured":91,"template":788},"speed-up-code-reviews-let-ai-handle-the-feedback-implementation",{"content":831,"config":839},{"title":832,"description":833,"authors":834,"heroImage":835,"date":836,"body":837,"category":679,"tags":838},"Accelerate code reviews with GitLab Duo and Amazon Q","Use AI-powered agents to optimize code reviews by automatically analyzing merge requests and providing comprehensive feedback on bugs, readability, and coding 
standards.",[820],"https://res.cloudinary.com/about-gitlab-com/image/upload/v1750096976/Blog/Hero%20Images/Blog/Hero%20Images/Screenshot%202024-11-27%20at%204.55.28%E2%80%AFPM_4VVz6DgGBOvbGY8BUmd068_1750096975734.png","2025-06-02","Code reviews are critical for catching bugs, improving code readability, and maintaining coding standards, but they can also be a major bottleneck in your workflow. When you're trying to ship features quickly, waiting for multiple team members to review your code can be frustrating. The back-and-forth discussions, the scheduling conflicts, and the time it takes to get everyone aligned can stretch what should be a simple review into days or even weeks.\n\nHere's where [GitLab Duo with Amazon Q](https://about.gitlab.com/blog/gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws/), our new offering that delivers agentic AI throughout the software development lifecycle for AWS customers, comes in to transform your review process. This intelligent, AI-powered solution can perform comprehensive code reviews for you in a fraction of the time it would take your human colleagues. By leveraging advanced agentic AI capabilities, GitLab Duo with Amazon Q streamlines your entire review workflow without sacrificing the quality and thoroughness you need. Think of it as having an always-available, highly skilled reviewer who can instantly analyze your code and provide actionable feedback.\n\n## How it works: Launching a code review\n\nSo how does GitLab Duo with Amazon Q actually work? Let's say you've just finished working on a feature and created a merge request with multiple code updates. Instead of pinging your teammates and waiting for their availability, you simply enter a quick command in the comment section: \"/q review\". 
That's it – just those two words trigger the AI to spring into action.\n\n![Triggering a code review using GitLab Duo with Amazon Q](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097002/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097002096.png)\n\nOnce you've entered the command, Amazon Q Service immediately begins analyzing your code changes. You'll see a confirmation that the review is underway, and within moments, the AI is examining every line of your updates, checking for potential issues across multiple dimensions.\nWhen the review completes, you receive comprehensive feedback that covers all the bases: bug detection, readability improvements, syntax errors, and adherence to your team's coding standards. The AI doesn't just point out problems, it provides context and suggestions for fixing them, making it easy for you to understand what needs attention and why.\n\nThe beauty of this agentic AI approach is that it handles the heavy lifting of code review while you focus on what matters most: building great software. You get the benefits of thorough code reviews — better bug detection, consistent coding standards, and improved code quality — without the time sink. Your deployment times shrink dramatically because you're no longer waiting in review queues, and your entire team becomes more productive.\n\n## Why use GitLab Duo with Amazon Q?\n\nGitLab Duo with Amazon Q transforms your development workflow in the following ways:\n- Lightning-fast code reviews that don't compromise on quality\n- Consistent application of coding standards across your entire codebase\n- Immediate feedback that helps you fix issues before they reach production\n- Reduced deployment times that let you ship features faster\n- More time for your team to focus on creative problem-solving instead of repetitive reviews\n\nReady to see this game-changing feature in action? 
Watch how GitLab Duo with Amazon Q can revolutionize your code review process:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/4gFIgyFc02Q?si=GXVz--AIrWiwzf-I\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n> To learn more about GitLab Duo with Amazon Q visit us at an upcoming [AWS Summit in a city near you](https://about.gitlab.com/events/aws-summits/) or [reach out to your GitLab representative](https://about.gitlab.com/partners/technology-partners/aws/#form).\n> \n> And make sure to join the GitLab 18 virtual launch event to learn about our agentic AI plans and more. [Register today!](https://about.gitlab.com/eighteen/)",[675,475,826,757,785,282,825,827],{"slug":840,"featured":91,"template":788},"accelerate-code-reviews-with-gitlab-duo-and-amazon-q",{"content":842,"config":851},{"title":843,"description":844,"authors":845,"heroImage":847,"date":848,"body":849,"category":679,"tags":850},"GitLab Duo Chat gets agentic AI makeover  ","Our new Duo Chat experience, currently an experimental release, helps developers onboard to projects, understand assignments, implement changes, and more.",[846],"Torsten Linz","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099203/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%2820%29_2bJGC5ZP3WheoqzlLT05C5_1750099203484.png","2025-05-29","Generative AI chat assistants have become standard in software development, helping create and fix code just to start. But what if your chat assistant could understand the artifacts of your entire development process, not just your code? What if that chat assistant could help you work through issues and project documentation before it helps you write code, and could access CI/CD pipelines and merge requests to help you finish coding tasks properly? 
\n\n**Meet the next generation of GitLab Duo Chat – GitLab Duo Agentic Chat, a significant evolution in AI-native development assistance and the newest addition to our platform, now in [experimental release](https://docs.gitlab.com/policy/development_stages_support/#experiment).** GitLab Duo Agentic Chat is currently available as an experimental feature in VS Code to all users on GitLab.com that have any one of these add-ons: Duo Core, Duo Pro, or Duo Enterprise.\n\nAgentic Chat transforms chat from traditional conversational AI to a chat experience that takes action on your behalf, breaking down complex problems into discrete tasks that it can complete. Instead of simply responding to questions with the context you provide, Agentic Chat can:\n\n* **Autonomously determine** what information it needs to answer your questions  \n* **Execute a sequence of operations** to gather that information from multiple sources  \n* **Formulate comprehensive responses** by combining insights from across your project  \n* **Create and modify files** to help you implement solutions\n\nAnd all of this is done while keeping the human developer within the loop.\n\nAgentic Chat is built on the Duo Workflow architecture, which is [currently in private beta](https://about.gitlab.com/blog/gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai/). The architecture comprises agents and tools that take on specific tasks like finding the right context for a given question or editing files. 
\n\n**Use cases for GitLab Duo Agentic Chat**\n\nHere are some real-world and common use cases for Agentic Chat:\n\n* Onboard to new projects faster by having AI help you familiarize yourself with a new codebase.\n\n* Jump into assigned work immediately, even when issue descriptions are unclear, because Agentic Chat can help you connect the dots between requirements and existing implementations.\n\n* When it's time to make changes, Agentic Chat can handle the implementation work by creating and editing multiple files across your project.\n\n* At release time, Agentic Chat can help you verify that your solution actually addresses the original requirements by analyzing your merge requests against the initial issue or task.\n\n![agentic chat - example](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099210/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750099210429.png)\n\n\u003Ccenter>\u003Ci>Agentic Chat making code edits\u003C/i>\u003C/center>\n\n## From learning to shipping: A complete workflow demonstration in four steps\n\nTo show how Agentic Chat transforms the development experience, let's walk through a real scenario from our engineering teams. Imagine you're a new team member who's been assigned an issue but knows nothing about the codebase. You can follow along with this video demonstration:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/uG9-QLAJrrg?si=kaOhYylMIaWkIuG8j\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n**Step 1: Understand the project**\n\nInstead of manually exploring files and documentation, you can prompt Agentic Chat:\n\n```unset\nI am new to this project. 
Could you read the project structure and explain it to me?\n```\n\nAgentic Chat provides a comprehensive project overview by:  \n- Exploring the directory structure  \n- Reading README files and documentation  \n- Identifying key components and applications\n\n**Step 2: Understand your assigned task**\n\nNext, you need to understand your specific assignment, so you can enter this prompt:\n\n```unset\nI have been assigned Issue 1119. Could you help me understand this task, specifically where do I need to apply the refactoring?\n```\n\nAgentic Chat explains the task and proposes a refactoring approach by:\n- Retrieving and analyzing the issue details from the remote GitLab server  \n- Examining relevant project files  \n- Identifying the specific locations requiring changes\n\n**Step 3: Implement the solution**\n\nRather than doing the work manually, you can request:\n\n```unset\nCould you make the edits for me? Please start with steps one, two, three.\n```\n\nAgentic Chat then:  \n- Creates new directories and files as needed \n- Extracts and refactors code across multiple locations  \n- Ensures consistency across all modified files  \n- Provides a summary of all changes made\n\n**Step 4: Verify completion**\n\nFinally, after creating your merge request, you can verify your work:\n\n```unset\nDoes my MR fully address Issue 1119? \n```\n\nAgentic Chat confirms whether all requirements have been met by analyzing both your merge request and the original issue.\n\n## Try it today and share your feedback\n\nGitLab Duo Agentic Chat is currently available as an experimental feature in VS Code to all users on GitLab.com that have any one of these add-ons: Duo Core, Duo Pro, or Duo Enterprise. 
See our [setup documentation](https://docs.gitlab.com/user/gitlab_duo_chat/agentic_chat/) for prerequisites and configuration steps.\n\nAs an experimental feature, Agentic Chat has some known limitations we're actively addressing, including slower response times due to multiple API calls, keyword-based rather than semantic search, and limited support for new local folders or non-GitLab projects. **Your feedback is crucial in helping us prioritize improvements and bring Agentic Chat to general availability so please share your experience in [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/542198).**\n\n## What's next?\n\nWe are fully focused on improving Agentic Chat, including bringing it to general availability. In the meantime, we are aiming to improve response times and are adding capabilities that GitLab Duo Chat currently has, such as using self-hosted models or supporting JetBrains and Visual Studio in addition to VS Code. Once we have switched Duo Chat to this new architecture we plan to also bring Agentic Chat to the chat in the GitLab web application. We also plan to add a lot more functionality, such as editing GitLab artifacts, supporting context from custom Model Context Protocol, or MCP, servers, and offering commands to run in the terminal.\n\n> Ready to experience autonomous development assistance but not yet a GitLab customer? Try Agentic Chat today as part of [a free, 60-day trial of GitLab Ultimate with Duo Enterprise](https://about.gitlab.com/free-trial/) and help shape the future of AI-powered development. Follow these [setup steps for VS Code](https://docs.gitlab.com/user/gitlab_duo_chat/agentic_chat/#use-agentic-chat-in-vs-code).\n>\n> And make sure to join the GitLab 18 virtual launch event to learn about our agentic AI plans and more. [Register today!](https://about.gitlab.com/eighteen/)\n\n***Disclaimer: This blog contains information related to upcoming products, features, and functionality. 
It is important to note that the information in this blog post is for informational purposes only. Please do not rely on this information for purchasing or planning purposes. As with all projects, the items mentioned in this blog and linked pages are subject to change or delay. The development, release, and timing of any products, features, or functionality remain at the sole discretion of GitLab.***\n\n## Learn more\n\n- [GitLab Duo Workflow: Enterprise visibility and control for agentic AI](https://about.gitlab.com/blog/gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai/)\n- [What is agentic AI?](https://about.gitlab.com/topics/agentic-ai/)\n- [Agentic AI guides and resources](https://about.gitlab.com/blog/agentic-ai-guides-and-resources/)\n",[675,736,785,475,757,827],{"slug":852,"featured":91,"template":788},"gitlab-duo-chat-gets-agentic-ai-makeover",{"category":687,"slug":691,"posts":854},[855,867,878],{"content":856,"config":865},{"title":857,"description":858,"authors":859,"heroImage":861,"date":862,"body":863,"category":691,"tags":864},"GitLab rotating Omnibus Linux package signing key","Learn who is impacted by the rotation of the GNU Privacy Guard (GPG) key and what you need to know.",[860],"GitLab","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749663000/Blog/Hero%20Images/tanukilifecycle.png","2025-04-16","As part of our standard security practices, GitLab is rotating the GNU Privacy Guard (GPG) key used to sign all Omnibus Linux packages on April 16, 2025. This key ensures the integrity of our packages, verifying that they have not been tampered with after creation in our CI pipelines. This key is distinct from the repository metadata signing key used by package managers and the GPG signing key for the GitLab Runner. 
GitLab is revoking the existing key and will begin signing upcoming packages using a new key with fingerprint `98BF DB87 FCF1 0076 416C 1E0B AD99 7ACC 82DD 593D`.\n\n**What do I need to do?**\n\nIf you currently validate the GPG signatures of GitLab Omnibus packages, you will need to update your copy of the package signing key. Packages published before this article will remain signed with the previous key.\n\nThe package signing key is separate from the repository metadata signing key used by your operating system’s package managers (like `apt` or `yum`). Unless you are specifically verifying package signatures or have configured your package manager to verify the package signatures, no action is required to continue installing GitLab Omnibus packages.\n\n**Where can I find the new key?**\n\nThe new key can be downloaded from `packages.gitlab.com` using the URL:\n\n[https://packages.gitlab.com/gitlab/gitlab-ee/gpgkey/gitlab-gitlab-ee-CB947AD886C8E8FD.pub.gpg](https://packages.gitlab.com/gitlab/gitlab-ee/gpgkey/gitlab-gitlab-ee-CB947AD886C8E8FD.pub.gpg)\n\nPlease check the documentation for more information concerning [verification of the package signatures](https://docs.gitlab.com/omnibus/update/package_signatures#package-signatures).\n\n**What do I do if I still have problems?**\n\nPlease open an issue in the [omnibus-gitlab issue tracker](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/new?issue&issuable_template=Bug).",[475,757,767],{"slug":866,"featured":6,"template":788},"gitlab-rotating-omnibus-linux-package-signing-key",{"content":868,"config":876},{"title":869,"description":870,"authors":871,"heroImage":780,"date":873,"body":874,"category":691,"tags":875},"Prepare now: Docker Hub rate limits will impact GitLab CI/CD","Learn how Docker Hub's upcoming pull rate limits will affect GitLab pipelines and what you can do to avoid disruptions.",[872],"Tim Rizzi","2025-03-24","On April 1, 2025, Docker will implement new [pull rate 
limits](https://docs.docker.com/docker-hub/usage/) to Docker Hub that may significantly impact CI/CD pipelines across the industry, including those running on GitLab. The most significant change is the 100 pulls-per-6-hours limit for unauthenticated users.\n\n## What's changing?\n\nStarting April 1, Docker will enforce the following pull rate limits:\n\n| User type | Pull rate limit | Number of public repositories | Number of private repositories |\n|-----------|--------------------------|-------------------------------|--------------------------------|\n| Business, Team, Pro (authenticated) | Unlimited (fair use) | Unlimited | Unlimited |\n| Personal (authenticated) | 200 per 6-hour window | Unlimited | Up to 1 |\n| Unauthenticated users | 100 per 6-hour window per IPv4 address or IPv6 /64 subnet | Not applicable | Not applicable |\n\n\u003Cp>\u003C/p>\nThis is particularly important because:\n\n* GitLab's Dependency Proxy currently pulls from Docker Hub as an unauthenticated user.\n* Most CI/CD pipelines that don't use the Dependency Proxy pull directly from Docker Hub as unauthenticated users.\n* On hosted runners for GitLab.com, multiple users might share the same IP address or subnet, making them collectively subject to this limit.\n\n## How this impacts GitLab users\n\n**Impact on direct Docker Hub pulls**\n\nIf your CI/CD pipelines directly pull images from Docker Hub without authentication, they will be limited to 100 pulls per six-hour window per IP address. For pipelines that run frequently or across multiple projects sharing the same runner infrastructure, this will quickly exhaust the limit and cause pipeline failures.\n\n**Impact on GitLab Dependency Proxy**\n\nThe GitLab Dependency Proxy feature allows you to cache Docker images within GitLab to speed up pipelines and reduce external dependencies. 
However, the current implementation pulls from Docker Hub as an unauthenticated user, meaning it will also be subject to the 100 pulls-per-6-hours limit.\n\n**Impact on hosted runners**\n\nFor hosted runners on GitLab.com, we use [Google Cloud's pull-through cache](https://cloud.google.com/artifact-registry/docs/pull-cached-dockerhub-images). This mirrors the commonly pulled images and allows us to avoid rate limits. Job images defined as `image:` or `services:` in your `.gitlab-ci.yml` file, are not affected by rate limits.\n\nThings are slightly more challenging whenever images are pulled within the runner environment. The most common use case to pull images during runner runtime is to build an image using Docker-in-Docker or Kaniko. In this scenario, the Docker Hub image defined in your `Dockerfile` is pulled directly from Docker Hub and is likely to be affected by rate limits.\n\n## How GitLab is responding\n\nWe're actively working on solutions to mitigate these challenges:\n\n* **Dependency Proxy authentication:** We've added support for Docker Hub authentication in the [GitLab Dependency Proxy feature](https://gitlab.com/gitlab-org/gitlab/-/issues/331741). 
This will allow the Dependency Proxy to pull images from Docker Hub as an authenticated user, significantly increasing the rate limits.\n* **Documentation updates:** We've updated our [documentation](https://docs.gitlab.com/user/packages/dependency_proxy/#configure-credentials) to provide clear guidance on configuring pipeline authentication for Docker Hub.\n* **Internal infrastructure preparation:** We're preparing our internal infrastructure to minimize the impact on hosted runners for GitLab.com.\n\n## How you can prepare\n\n**Option 1: Configure Docker Hub authentication in your pipelines**\n\nFor pipelines that pull directly from Docker Hub, you can configure authentication to increase your rate limit to 200 pulls per six-hour window (or unlimited with a paid Docker Hub subscription).\n\nAdd Docker Hub credentials to your project or group CI/CD variables (not in your `.gitlab-ci.yml` file). Please refer to our [documentation on using Docker images](https://docs.gitlab.com/ci/docker/using_docker_images/#use-statically-defined-credentials) for detailed instructions on setting up the `DOCKER_AUTH_CONFIG` CI/CD variable correctly.\n\n**Option 2: Use the GitLab Container Registry**\n\nConsider pushing your frequently used Docker images to your [GitLab Container Registry](https://docs.gitlab.com/user/packages/container_registry/). This eliminates the need to pull from Docker Hub during CI/CD runs:\n\n1. Pull the image from Docker Hub.\n2. Tag it for your GitLab Container Registry.\n3. Push it to your GitLab Container Registry.\n4. 
Update your pipelines to pull from GitLab Container Registry.\n\n```\ndocker pull busybox:latest\ndocker tag busybox:latest $CI_REGISTRY_IMAGE/busybox:latest\ndocker push $CI_REGISTRY_IMAGE/busybox:latest\n```\n\nThen in your `.gitlab-ci.yml`:\n\n`image: $CI_REGISTRY_IMAGE/busybox:latest`\n\n**Option 3: Use GitLab Dependency Proxy**\n\nGitLab's Dependency Proxy feature provides a way to cache and proxy Docker images, reducing external dependencies and rate limit issues.\n\nCurrent authentication options:\n* GitLab 17.10: Configure Docker Hub authentication for the Dependency Proxy using [GraphQL API](https://docs.gitlab.com/user/packages/dependency_proxy/#configure-credentials-using-the-graphql-api)\n* GitLab 17.11: Use the new UI-based configuration in your group's settings (already available on GitLab.com)\n\nOnce authentication is properly configured, you can:\n\n1. Configure Docker Hub credentials in your group's Dependency Proxy settings:\n  - For GitLab 17.11+ (or current GitLab.com): Navigate to your group's settings > Packages & Registries > Dependency Proxy.\n  - For GitLab 17.10: Use the GraphQL API to configure authentication.\n2. 
Update your pipelines to use the Dependency Proxy URLs in your CI/CD configuration:\n`image: ${CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX}/busybox:latest`\n\n**Option 4: Consider a Docker Hub paid subscription**\n\nFor organizations with heavy Docker Hub usage, upgrading to a paid Docker subscription (Team or Business) will provide unlimited pulls, which may be the most straightforward solution.\n\n## Best practices to reduce Docker Hub rate limit impact\n\nRegardless of which option you choose, consider these best practices to minimize Docker Hub rate limit impact:\n\n* Use specific image tags instead of `latest` to avoid unnecessary pulls.\n* Consolidate your Docker files to use the same base images across projects.\n* Schedule less critical pipelines to run outside of peak hours.\n* Use caching effectively to avoid pulling the same images repeatedly.\n\n**Note:** According to Docker Hub [documentation](https://docs.docker.com/docker-hub/usage/pulls/#pull-definition), the pull count is incremented when pulling the image manifest, not based on image size or number of layers.\n\n## Timeline and next steps\n\n**Now**\n  * Implement authentication for direct Docker Hub pulls.\n  * GitLab.com users can already configure Docker Hub authentication for the Dependency Proxy using either:\n    * The GraphQL API, or\n    * The UI in group settings\n  * Self-managed GitLab 17.10 users can configure Dependency Proxy authentication using the GraphQL API.\n\n**April 1, 2025**\n  * Docker Hub rate limits go into effect.\n\n**April 17, 2025**\n  * GitLab 17.11 will be released with UI-based Dependency Proxy authentication support for self-managed instances. \n\nWe recommend taking action well before the April 1 deadline to avoid unexpected pipeline failures. For most users, configuring the Dependency Proxy with Docker Hub authentication is the most efficient long-term solution.\n\n> Have questions or need implementation help? 
Please visit [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/526605) where our team is actively providing support.",[109,736,475],{"slug":877,"featured":91,"template":788},"prepare-now-docker-hub-rate-limits-will-impact-gitlab-ci-cd",{"content":879,"config":888},{"title":880,"description":881,"authors":882,"heroImage":884,"date":885,"body":886,"category":691,"tags":887},"GitLab achieves PCI DSS Attestation of Compliance","Learn how our completion of the AoC as a Level 1 Service Provider, along with our broader security credentials,  helps us support customers'  compliance efforts.",[883],"Sasha Gazlay","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749673615/Blog/Hero%20Images/blog-image-template-1800x945__4_.png","2025-03-17","Compliance with the [Payment Card Industry (PCI) Data Security Standard (DSS)](https://www.pcisecuritystandards.org/) helps to alleviate security vulnerabilities and protect cardholder data. The PCI standard is required of any enterprise handling credit card and related authentication data or whose services could impact the security of the cardholder data environment. GitLab, as the most comprehensive AI-powered DevSecOps platform, provides the tools and resources to support our customers’ security posture, including those handling PCI-relevant data.\n\nGitLab is pleased to announce the successful achievement of a PCI DSS Attestation of Compliance (AoC) as a Level 1 Service Provider. The AoC comes after an independent Qualified Security Assessor-led validation of our alignment to the PCI DSS. 
The attestation, in addition to GitLab’s broader compliance credentials, demonstrates the dedication to our mission of being the leading example in security, innovation, and [transparency](https://handbook.gitlab.com/handbook/values/#transparency) in our information security practices.\n\nPlease visit GitLab’s [Trust Center](https://about.gitlab.com/security/) to view our AoC as well as details on shared responsibilities when relying on our attestation. Also, check out [our PCI compliance page](https://about.gitlab.com/compliance/pci-compliance/), which explains how GitLab can support your compliance efforts.",[475,767,736],{"slug":889,"featured":6,"template":788},"gitlab-achieves-pci-dss-attestation-of-compliance",{"category":698,"slug":702,"posts":891},[892,907,919],{"content":893,"config":905},{"title":894,"description":895,"authors":896,"heroImage":898,"date":899,"body":900,"category":702,"tags":901},"The Co-Create Program: How customers are collaborating to build GitLab","Learn how organizations like Thales, Scania, and Kitware are partnering with GitLab engineers to contribute meaningful features that benefit the entire community.",[897],"Fatima Sarah Khalid","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659756/Blog/Hero%20Images/REFERENCE_-_display_preview_for_blog_images.png","2025-01-30","This past year, over 800 community members have made more than 3,000 contributions to GitLab. 
These contributors include team members from global organizations like Thales, Scania, and Kitware, who are helping shape GitLab's future through the [Co-Create Program](https://about.gitlab.com/community/co-create/) — GitLab's collaborative development program where customers work directly with GitLab engineers to contribute meaningful features to the platform.\n\nThrough workshops, pair programming sessions, and ongoing support, program participants get hands-on experience with GitLab's architecture and codebase while solving issues or improving existing features.\n\n\"Our experience with the Co-Create Program has been incredible,\" explains Sébastien Lejeune, open source advocate at Thales. \"It only took two months between discussing our contribution with a GitLab Contributor Success Engineer and getting it live in the GitLab release.\"\n\nIn this post, we'll explore how customers have leveraged the Co-Create Program to turn their ideas into code, learning and contributing along the way.\n\n## The Co-Create experience\n[The GitLab Development Kit (GDK)](https://gitlab.com/gitlab-org/gitlab-development-kit) helps contributors get started developing on GitLab. \"The advice I would give new contributors is to remember that you can't break anything with the GDK,\" says Hook. \"If you make a change and it doesn't work, you can undo it or start again. 
The beauty of GDK is that you can tinker, test, and learn without worrying about the environment.\"\n\nEach participating organization in the Co-Create Program receives support throughout their contribution journey:\n\n- __Technical onboarding workshop__: A dedicated session to set up the GitLab Development Kit (GDK) and understand GitLab's architecture\n- __1:1 engineering support__: Access to GitLab engineers for pair programming and technical guidance\n- __Architecture deep dives__: Focused sessions on specific GitLab components relevant to the issue the organization is contributing to\n- __Code review support__: Detailed feedback and guidance through the merge request process\n- __Regular check-ins__: Ongoing collaboration to ensure progress and address any challenges\n\nThis structure ensures that teams can contribute effectively, regardless of their prior experience with GitLab's codebase or the Ruby/Go programming language. As John Parent from Kitware notes, \"If you've never seen or worked with GitLab before, you're staring at a sophisticated architecture and so much code across different projects. The Co-Create Program helps distill what would take weeks of internal training into a targeted crash course.\"\n\nThe result is a program that not only helps deliver new features but also builds lasting relationships between GitLab and its user community. \"It's inspiring for our engineers to see the passion our customers bring to contributing to and building GitLab together,\" shares Shekhar Patnaik, principal engineer at GitLab. \"Customers get to see the 'GitLab way,' and engineers get to witness their commitment to shaping the future of GitLab.\"\n\n## Enhancing project UX with Thales\nWhen Thales identified opportunities to improve GitLab's empty project UI, they didn't just file a feature request — they built the solution themselves. 
Their contributions focused on streamlining the new project setup experience by simplifying SSH/HTTPS configuration with a tabbed interface and adding copy/paste functionality for the code snippets. These changes had a significant impact on developer workflows.\n\nThe team's impact extended beyond the UX improvements. Quentin Michaud, PhD fellow for cloud applications on the edge at Thales, contributed to improving the GitLab Development Kit (GDK). As a package maintainer for Arch Linux, Michaud's expertise helped improve GDK's documentation and support its containerization efforts, making it easier for future contributors to get started.\n\n\"My open source experience helped me troubleshoot GDK's support for Linux distros,” says Michaud. “While improving package versioning documentation, I saw that GitLab's Contributor Success team was also working to set up GDK into a container. Seeing our efforts converge was a great moment for me — it showed how open source collaboration can help build better solutions.\"\n\nThe positive experience for the Thales team means that Lejeune now uses the Co-Create Program as \"a powerful example to show our managers the return on investment from open source contributions.\"\n\n## Advancing package support with Scania\nWhen Scania needed advanced package support in GitLab, they saw an opportunity to contribute and build it themselves. \n\n\"As long-time GitLab users who actively promote open source within our organization, the Co-Create Program gave us a meaningful way to contribute directly to open source,\" shares Puttaraju Venugopal Hassan, solution architect at Scania.\n\nThe team started with smaller changes to familiarize themselves with the codebase and review process, then progressed to larger features. \"One of the most rewarding aspects of the Co-Create Program has been looking back at the full, end-to-end process and seeing how far we've come,\" reflects Océane Legrand, software developer at Scania. 
\"We started with discovery and smaller changes, but we took on larger tasks over time. It's great to see that progression.\" \n\nTheir contributions include bug fixes for the package registry and efforts to enhance the Conan package registry feature set, bringing it closer to general availability (GA) readiness while implementing Conan version 2 support. Their work and collaboration with GitLab demonstrates how the Co-Create Program can drive significant improvements to GitLab’s package registry capabilities.\n\n\"From the start, our experience with the Co-Create Program was very organized. We had training sessions that guided us through everything we needed to contribute. One-on-one sessions with a GitLab engineer also gave us an in-depth look at GitLab’s package architecture, which made the contribution process much smoother,\" said Juan Pablo Gonzalez, software developer at Scania. \n\nThe impact of the program goes beyond code — program participants are also building valuable skills as a direct result of their contributions. In [the GitLab 17.8 release](https://about.gitlab.com/releases/2025/01/16/gitlab-17-8-released/#mvp), both Legrand and Gonzalez were recognized as GitLab MVPs. Legrand talked about how the work she's doing in open source impacts both GitLab and Scania, including building new skills for her and her team: \"Contributing through the Co-Create Program has given me new skills, like experience with Ruby and background migrations. When my team at Scania faced an issue during an upgrade, I was able to help troubleshoot because I'd already encountered it through the Co-Create Program.\"\n\n## Optimizing authentication for high-performance computing with Kitware\nKitware brought specialized expertise from their work with national laboratories to improve GitLab's authentication framework. 
Their contributions included adding support for the OAuth2 device authorization grant flow in GitLab, as well as implementing new database tables, controllers, views, and documentation. This contribution enhances GitLab's authentication options, making it more versatile for devices without browsers or with limited input capabilities.\n\n\"The Co-Create Program is the most efficient and effective way to contribute to GitLab as an external contributor,\" shares John Parent, R&D engineer at Kitware. \"Through developer pairing sessions, we found better implementations that we might have missed working alone.\"\n\nAs a long-time open source contributor, Kitware particularly appreciated GitLab's approach to development. \"I assumed GitLab wouldn't rely on out-of-the-box solutions at its scale, but seeing them incorporate a Ruby dependency instead of building a custom in-house solution was great,” says Parent. “Coming from the C++ world, where package managers are rare, it was refreshing to see this approach and how straightforward it could be.\"\n\n## Building better together: Benefits of Co-Create\nThe Co-Create Program creates value that flows both ways. \"The program bridges a gap between us as GitLab engineers and our customers,\" explains Imre Farkas, staff backend engineer at GitLab. \"As we work with them, we hear their day-to-day challenges, the parts of GitLab they rely on, and where improvements can be made. It's great to see how enthusiastic they are about getting involved in building GitLab with us.\"\n\nThis collaborative approach also accelerates GitLab's development. As Shekhar Patnaik, principal engineer at GitLab, observes: \"Through Co-Create, our customers are helping us move our roadmap forward. Their contributions allow us to deliver critical features faster, benefitting our entire user base. 
As the program scales, there's a real potential to accelerate development on our most impactful features by working alongside the very people who rely on them.\"\n\n## Get started with Co-Create\nReady to turn your feature requests into reality? Whether you're looking to enhance GitLab's UI like Thales, improve package support like Scania, or optimize authentication like Kitware, the Co-Create Program welcomes organizations who want to actively shape GitLab's future while building valuable open source experience.\n\nContact your GitLab representative to learn more about participating in the Co-Create Program, or visit our [Co-Create page](https://about.gitlab.com/community/co-create/) for more information.\n",[902,903,904],"contributors","open source","customers",{"slug":906,"featured":91,"template":788},"the-co-create-program-how-customers-are-collaborating-to-build-gitlab",{"content":908,"config":917},{"title":909,"description":910,"authors":911,"heroImage":898,"date":913,"body":914,"category":702,"tags":915},"Kingfisher transforming the developer experience with GitLab","Learn how the international company focuses on DevSecOps, including automation, to reduce complexity in workflows for better efficiency.",[912],"Sharon Gaudin","2024-11-12","Kingfisher plc, an international home improvement company, has leaned into GitLab’s end-to-end platform to help it build a DevSecOps foundation that is revolutionizing its developer experience. 
And the company plans to continue that improvement by increasing its use of platform features, focusing on security, simplifying its toolchain, and increasing the use of automation.\n\n> \u003Cimg align=\"left\" width=\"200\" height=\"200\" hspace=\"5\" vspace=\"5\" alt=\"Chintan Parmar\" src=\"//images.ctfassets.net/r9o86ar0p03f/3IifVeRyvs3uil81gN02pV/16b6e05652416dc976974b89fbfecb2f/image__1_.png\" style=\"float: left; margin-right: 25px;\"> “The whole point of this is to reduce friction for our engineers, taking away a lot of the complexity in their workflow, and bringing in best practices and governance,” says Chintan Parmar, site reliability engineering manager at Kingfisher. “In terms of what we've done and what we're doing at the moment, it really is about building a foundation in terms of CI/CD and changing the way we deploy to bring in consistency and improve the developer experience.”\n\nParmar talked about his team and their efforts during the [GitLab DevSecOps World Tour event](https://about.gitlab.com/events/devsecops-world-tour/) in London last month. In an on-stage interview with Sherrod Patching, vice president of Customer Success Management at GitLab, he laid out Kingfisher’s journey with the platform, which is enabling its teams, while also making it easier and faster to move software updates and new projects from ideation to deployment.\n\n[Kingfisher](https://www.kingfisher.com/en/index.html) is a parent company with more than 2,000 stores in eight countries across Europe. Listed on the London Stock Exchange and part of the Financial Times Stock Exchange (FTSE) 100 Index, the group reported £13 billion in total revenue in FY 2023/24. Its brands include B&Q, Screwfix, Castorama, and Brico Depot. \n\nThe company first adopted GitLab in 2016, using a free starter license, and then moved to Premium in 2020. 
In that time, it also has moved from on-premise to a cloud environment, started using shared GitLab runners and source code management, and began building out a CI/CD library that gives team members easy access to standardized and reusable components for typical pipeline stages, such as build, deploy, and test.\n\n## Tracking metrics that execs care about\n\nKingfisher also is tracking metrics, like deployment frequency, lead time to change, and change failure rates, with GitLab. And teams are analyzing value streams, mapping workflows, and finding bottlenecks. All of those metrics are being translated into data that company leaders can sink their teeth into. \n\n“Execs may not care about whether a merge request has been waiting 15 or 20 minutes, but they do care about how we translate that time value into dollars or pounds,” says Parmar, who used GitLab when he previously worked at [Dunelm Group, plc,](https://about.gitlab.com/customers/dunelm/) another major UK-based retailer. “Kingfisher is a very data-driven organization. We are looking to overlay these metrics to see where we can continue to improve our developer experience, eliminating slowdowns and manual tasks, while increasing automation.”\n\nWhile on-stage, Parmar made it clear that all the changes being made are aimed at improving software development and deployment. However, it’s equally paramount to making team members’ jobs easier, giving them more time and autonomy to do the kind of work they enjoy, instead of what can seem like a never-ending stream of repetitive, manual tasks. He noted that the team is so focused on easing workflows and giving engineers more time to be innovative, it has created a “developer experience squad.”\n\n## Putting people first while laying out priorities\n\nSo what’s coming next for Kingfisher and its engineering squads, which have about 600 practitioners?  \n\nAccording to Parmar, Kingfisher already has its priorities mapped out. 
Using GitLab to [move security left](https://about.gitlab.com/solutions/security-compliance/) is at the top of their list. The group also is focused on continuing to reduce its toolchain, and using automation to increase productivity. And he expects that early in 2025, teams will begin “dabbling” with the artificial intelligence capabilities in [GitLab Duo](https://about.gitlab.com/gitlab-duo/), a suite of AI-powered features in the platform that help increase velocity and solve key pain points across the software development lifecycle. Kingfisher will focus on how that can further increase its efficiency and productivity. \n\nTo get all of this done, Parmar says the first step is to ensure that people come first.\n\n“We’re focused on the hearts and minds of our people... and remembering that people can be attached to how they work through pipelines,” he adds. “People have different ways of building their pipelines. We need to understand what they need, what their workflows look like, and then work with them to find the right solution. After, we’ll go back to them with data that shows the improvements worked. So instead of telling them what they need, we find out what that is, and fix what’s slowing them down. That builds a very good rapport with our engineers.”\n\nChanging how a team creates and deploys software is a journey. 
Parmar suggests that collaboratively taking developers and security teams on that journey, instead of dragging them along, makes a big difference in ease of migration and in easing team members’ user experience.\n\n> Learn [how other GitLab customers use the DevSecOps platform](https://about.gitlab.com/customers/) to gain results for customers.\n",[904,475,709,916],"workflow",{"slug":918,"featured":91,"template":788},"kingfisher-transforming-the-developer-experience-with-gitlab",{"content":920,"config":929},{"title":921,"description":922,"authors":923,"heroImage":925,"date":926,"body":927,"category":702,"tags":928},"Online retailer bol tackles growing compliance needs with GitLab","Learn how GitLab helps the major international company adhere to regulations while increasing development efficiency.",[924],"Julie Griffin","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749665465/Blog/Hero%20Images/blog-image-template-1800x945__15_.png","2024-06-12","[Bol](https://www.bol.com/nl/nl/), which uses GitLab Ultimate, is one of the largest online retailers in the Netherlands and Belgium. The company offers a product range of 38 million items alongside 50,000 sales partners who sell their goods on its marketplace. Bol relies on innovative technology to increase development efficiency, adhere to compliance regulations, and maintain trust across its extensive customer base.\n\nBol equips its teams with the GitLab DevSecOps platform, enabling its developers to quickly and securely ship projects, while saving the team thousands of manual hours on compliance checks.\n\n“GitLab is helping us stay flexible and competitive as we grow, and as the requirements that our software and our developers need to comply with grow,” says Guus Houtzager, engineering manager on bol’s Continuous Integration and Continuous Deployment team. 
“That's the biggest challenge that we had and we tackled it with GitLab.”\n\nHowever, as bol's revenue grew, so did the compliance rules and regulations it had to adhere to. The company needs to continually adapt its software to meet strict, and often updated regulations, such as the General Data Protection Regulation (GDPR), International Organization for Standardization (ISO) requirements, and the EU Artificial Intelligence Act.\n\nAfter adopting GitLab Community in 2016 and GitLab Premium several years later, bol upgraded to GitLab Ultimate in 2024 to [meet the growing compliance load](https://about.gitlab.com/solutions/security-compliance/) and help its teams tackle projects faster and more efficiently.\n\n![Guus Houtzager of bol - quote box](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749675638/Blog/Content%20Images/bol_Blog_-_Guus.png)\n\n## Saving thousands of developer hours per month \n\nGitLab enables bol’s DevSecOps teams to set up policies that automate compliance configurations and checks. This helps them achieve consistency and scalability in their compliance efforts, and reduce the risk of human error. With compliance guardrails in place, its team of 850 developers can focus more of their energy on creating innovative, secure software.\n\n“We bought GitLab Ultimate so we can have compulsory compliance pipelines that ensures our teams are working within compliance regulations from the start,” says Houtzager.\n\nBy allowing developers to focus on coding without the burden of compliance regulations, the bol development team dramatically increased its efficiency.\n\n“This has saved our developers several thousands of hours in total per month,” says Houtzager.\n\nIn addition to time savings, the team is now confident it can handle any compliance roadblocks that come its way.\n\n“We know that GitLab is going to help us with compliance and software security,” says Houtzager. 
“Even if we get new regulations, we have a toolkit, through GitLab, that enables us to follow and comply with any new regulations. We don't know exactly what will happen, but we know we are in a position to handle whatever comes our way.”\n\n## Shifting left to protect customers and its business\n\nAs a large player in the European retail world, trust is a key pillar of bol’s business model. The company handles a large quantity of personal data, such as addresses and order details. While regulatory fines are a concern, so is maintaining trust with its customer base. That only emphasizes the importance of security.\n\n“Most of the people in the Netherlands and Belgium have bought something from us in the past and people trust us,” says Houtzager. “They trust that we handle their payment details properly. We don't sell your Personal Identifiable Information (PII) data, and they trust us to keep it safe and secure.”\n\nTo protect customer data and its business, bol shifted security left, enabling developers to find errors and vulnerabilities earlier in the development process. However, shifting left without the right tools in place could lead to developers spending countless hours trying to correct any problems they find.\n\n“If you shift left without also providing teams the tools, support, and processes to make sure that they can do this work in an efficient manner, teams get bogged down in either procedures or manual work,” says Houtzager.\n\nWith GitLab Ultimate, bol is able to set up the layout and permission model to meet the company’s security requirements, giving developers the freedom to quickly build and ship projects while protecting customer and business data. The DevSecOps platform has the added benefit of tracking the changes and fixes that developers make and noting them in compliance records. 
\n\n## Looking ahead to AI\n\nMoving forward, bol plans to use more GitLab Ultimate features, like cloud integration, and artificial intelligence (AI) capabilities, along with even more security features. \n\nFrom building secure software faster to improving the developer experience, bol looks forward to one day using AI-powered [GitLab Duo](https://about.gitlab.com/gitlab-duo/) to help them scale their software development. \n\n“The situation must be right for us to be able to use it and then we will definitely take a look at how it can help us,” says Houtzager. “We, like everybody else, are looking at where AI can help us to improve situations across the entire software development life cycle. So if someone is building code, how can it help them? If someone is working on other aspects of the process, how can it help them?” \n\n> Read more customer stories on [the GitLab customers page](https://about.gitlab.com/customers/).\n",[109,767,904,475],{"slug":930,"featured":6,"template":788},"online-retailer-bol-tackles-growing-compliance-needs-with-gitlab",{"category":709,"slug":713,"posts":932},[933,947,958],{"content":934,"config":944},{"title":935,"description":936,"authors":937,"heroImage":939,"date":940,"body":941,"category":713,"tags":942},"Overcome AI sprawl with a Value Stream Management approach","From The Source: Learn how an AI strategy based on Value Stream Management can stop AI sprawl and supply chain constraints and drive ROI.",[938],"Stephen Walters","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749665000/Blog/Hero%20Images/display-the-source-article-overcome-ai-sprawl-image-0492-1800x945-fy25.png","2025-01-06","This is a cross-over post about [overcoming AI sprawl with a Value Stream Management 
approach](https://about.gitlab.com/the-source/ai/overcome-ai-sprawl-with-a-value-stream-management-approach/).",[709,916,943],"performance",{"slug":945,"featured":6,"template":788,"externalUrl":946},"overcome-ai-sprawl-with-a-value-stream-management-approach","https://about.gitlab.com/the-source/ai/overcome-ai-sprawl-with-a-value-stream-management-approach/",{"content":948,"config":956},{"title":949,"description":950,"authors":951,"heroImage":953,"date":940,"body":954,"category":713,"tags":955},"Ultimate guide to CI/CD: Fundamentals to advanced implementation","Learn how to modernize continuous integration/continuous deployment, including automating the development, delivery, and security of pipelines.",[952],"Sandra Gittlen","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749660151/Blog/Hero%20Images/blog-image-template-1800x945__26_.png","Continuous integration/continuous delivery ([CI/CD](https://about.gitlab.com/topics/ci-cd/)) has revolutionized how software teams create value for their users. Gone are the days of manual deployments and integration headaches — modern development demands automation, reliability, and speed.\n\nAt its core, CI/CD is about creating a seamless pipeline that takes code from a developer's environment all the way to production and incorporates feedback in real time. [CI](https://about.gitlab.com/topics/ci-cd/benefits-continuous-integration/) helps teams catch issues early — before they become costly problems — by ensuring that code changes are frequently merged into a shared repository, automatically tested, and validated. [CD](https://about.gitlab.com/topics/ci-cd/#what-is-continuous-delivery-cd) extends this by automating deployments, making releases predictable and stress-free.\n\nRather than relying on manual processes and complex toolchains for software development, teams can use a robust CI/CD pipeline to build, test, and deploy software. 
And AI can streamline the process even further, automatically engineering CI/CD pipelines for consistent quality, compliance, and security checks.\n\nThis guide explains modern CI/CD pipelines, from basic principles to best practices to advanced strategies. You'll also discover how leading organizations use CI/CD for impactful results. What you learn in this guide will help you scale your DevSecOps environment to develop and deliver software in an [agile](https://about.gitlab.com/topics/ci-cd/continuous-integration-agile/), automated, and efficient manner.\n\nWhat you'll learn:\n- [What is continuous integration?](#what-is-continuous-integration%3F)\n- [What is continuous delivery?](#what-is-continuous-delivery%3F)\n- [How source code management relates to CI/CD](#how-source-code-management-relates-to-cicd)\n- [The benefits of CI/CD in modern software development](#the-benefits-of-cicd-in-modern-software-development)\n  - [Key differences between CI/CD and traditional development](#key-differences-between-cicd-and-traditional-development)\n- [Understanding CI/CD fundamentals](#understanding-cicd-fundamentals)\n  - [What is a CI/CD pipeline?](#what-is-a-cicd-pipeline%3F)\n- [Best practices for CI/CD implementation and management](#best-practices-for-cicd-implementation-and-management)\n  - [CI best practices](#ci-best-practices)\n  - [CD best practices](#cd-best-practices)\n- [How to get started with CI/CD](#how-to-get-started-with-cicd)\n- [Security, compliance, and CI/CD](#security-compliance%2C-and-cicd)\n- [CI/CD and the cloud](#cicd-and-the-cloud)\n- [Advanced CI/CD](#advanced-cicd)\n  - [Reuse and automation in CI/CD](#reuse-and-automation-in-cicd)\n  - [Troubleshooting pipelines with AI](#troubleshooting-pipelines-with-ai)\n- [How to migrate to GitLab CI/CD](#how-to-migrate-to-gitlab-cicd)\n- [Lessons from leading organizations](#lessons-from-leading-organizations)\n- [CI/CD tutorials](#cicd-tutorials)\n\n## What is continuous integration?\n\n[Continuous 
integration](https://about.gitlab.com/topics/ci-cd/benefits-continuous-integration/) (CI) is the practice of integrating all your code changes into the main branch of a shared source code repository early and often, automatically testing changes when you commit or merge them, and automatically kicking off a build. With continuous integration, teams can identify and fix errors and security issues more easily and much earlier in the development process.\n\n## What is continuous delivery?\n[Continuous delivery](https://about.gitlab.com/topics/ci-cd/#what-is-continuous-delivery-cd) (CD) – sometimes called _continuous deployment_ – enables organizations to deploy their applications automatically, allowing more time for developers to focus on monitoring deployment status and assure success. With continuous delivery, DevSecOps teams set the criteria for code releases ahead of time and when those criteria are met and validated, the code is deployed into the production environment. This allows organizations to be more nimble and get new features into the hands of users faster. \n\n## How source code management relates to CI/CD\n\nSource code management ([SCM](https://about.gitlab.com/solutions/source-code-management/)) and CI/CD form the foundation of modern software development practices. SCM systems like [Git](https://about.gitlab.com/blog/what-is-git-the-ultimate-guide-to-gits-role-and-functionality/) provide a centralized way to track changes, manage different versions of code, and facilitate collaboration among team members. When developers work on new features or bug fixes, they create branches from the main codebase, make their changes, and then [merge them through merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/). 
This branching strategy allows multiple developers to work simultaneously without interfering with each other's code, while maintaining a stable main branch that always contains production-ready code.\n\nCI/CD takes the code managed by SCM systems and automatically builds, tests, and validates it whenever changes are pushed. When a developer submits their code changes, the CI/CD system automatically retrieves the latest code, combines it with the existing codebase, and runs through a series of automated checks. These typically include compiling the code, running unit tests, performing static code analysis, and checking code coverage. If any of these steps fail, the team is immediately notified, allowing them to address issues before they impact other developers or make their way to production. This tight integration between source control and continuous integration creates a feedback loop that helps maintain code quality and prevents integration problems from accumulating.\n\n## The benefits of CI/CD in modern software development\n\n[CI/CD brings transformative benefits to modern software development](https://about.gitlab.com/blog/ten-reasons-why-your-business-needs-ci-cd/) by dramatically reducing the time and risk associated with delivering new features and fixes. The continuous feedback loop gives DevSecOps teams confidence their changes are automatically validated against the entire codebase. The result is higher quality software, faster delivery times, and more frequent releases that can quickly respond to user needs and market demands.\n\nPerhaps most importantly, CI/CD fosters a culture of collaboration and transparency within software development teams. When everyone can see the status of builds, tests, and deployments in real time, it becomes easier to identify and resolve bottlenecks in the delivery process. 
The automation provided by CI/CD also reduces the cognitive load on developers, freeing them to focus on writing code rather than managing manual deployment processes. This leads to improved developer satisfaction and productivity, while also reducing the risk traditionally associated with the entire software release process. Teams can experiment more freely knowing rapid code reviews are part of the process and they can quickly roll back changes if needed, which encourages innovation and continuous improvement.\n\n> Get started with GitLab CI/CD. [Sign up for GitLab Ultimate](https://about.gitlab.com/free-trial/devsecops/) and try the AI-powered DevSecOps platform free for 60 days.\n\n### Key differences between CI/CD and traditional development\n\nCI/CD differs from traditional software development in many ways, including:\n\n**Frequent code commits**\n\nDevelopers often work independently and infrequently upload their code to a main codebase, causing merge conflicts and other time-consuming issues. With CI/CD, developers push commits throughout the day, ensuring that conflicts are caught early and the codebase remains up to date.\n\n**Reduced risk**\n\nLengthy testing cycles and extensive pre-release planning are hallmarks of traditional software development. This is done to minimize risk but often hinders the ability to find and fix problems. Risk is managed in CI/CD by applying small, incremental changes that are closely monitored and easily reverted.\n\n**Automated and continuous testing**\n\nIn traditional software development, testing is done once development is complete. However, this causes problems, including delayed delivery and costly bug fixes. CI/CD supports automated testing that occurs continuously throughout development, sparked by each code commit. 
Developers also receive feedback they can take fast action on.\n\n**Automated, repeatable, and frequent deployments**\n\nWith CI/CD, deployments are automated processes that reduce the typical stress and effort associated with big software rollouts. The same deployment process can be repeated across environments, which saves time and reduces errors and inconsistencies.\n\n## Understanding CI/CD fundamentals\n\nCI/CD serves as a framework for building scalable, maintainable delivery processes, so it's critical for DevSecOps teams to firmly grasp its core concepts. A solid understanding of CI/CD principles enables teams to adapt strategies and practices as technology evolves, rather than being tied to legacy approaches. Here are some of the basics.\n\n### What is a CI/CD pipeline?\n\nA [CI/CD pipeline](https://about.gitlab.com/topics/ci-cd/cicd-pipeline/) is a series of steps, such as build, test, and deploy, that automate and streamline the software delivery process. [Each stage serves as a quality gate](https://about.gitlab.com/blog/guide-to-ci-cd-pipelines/), ensuring that only validated code moves forward. Early stages typically handle basic checks like compilation and unit testing, while later stages may include integration testing, performance testing, compliance testing, and staged deployments to various environments.\n\nThe pipeline can be configured to require manual approvals at critical points, such as before deploying to production, while automating routine tasks and providing quick feedback to developers about the health of their changes. This structured approach ensures consistency, reduces human error, and provides a clear audit trail of how code changes move from development to production. 
Modern pipelines are often implemented as code, allowing them to be version controlled, tested, and maintained just like application code.\n\nThese are other terms associated with CI/CD that are important to know:\n- **Commit:** a code change\n- **Job:** instructions a runner has to execute\n- **Runner:** an agent or server that executes each job individually that can spin up or down as needed\n- **Stages:** a keyword that defines certain job stages, such as \"build\" and \"deploy.\" Jobs of the same stage are executed in parallel. Pipelines are configured using a version-controlled YAML file, `.gitlab-ci.yml`, at the root level of a project.\n\n![CI/CD pipeline diagram](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749673928/Blog/Content%20Images/1690824533476.png)\n\n## Best practices for CI/CD implementation and management\n\nHow successful you are with CI/CD depends greatly on the [best practices](https://about.gitlab.com/blog/how-to-keep-up-with-ci-cd-best-practices/) you implement. \n\n#### CI best practices\n\n* Commit early, commit often.\n* Optimize pipeline stages.\n* Make builds fast and simple.\n* Use failures to improve processes.\n* Make sure the test environment mirrors production.\n\n#### CD best practices\n\n* Start where you are – you can always iterate.\n* Understand the best continuous delivery is done with minimal tools.\n* Track what’s happening so issues and merge requests don't get out of hand.\n* Streamline user acceptance testing and staging with automation.\n* Manage the release pipeline through automation.\n* Implement monitoring for visibility and efficiency. 
\n\n> ### Bookmark this!\n>\n>Watch our [\"Intro to CI/CD\" webinar](https://www.youtube.com/watch?v=sQ7Nw3o0izc)!\n>\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n\u003Ciframe src=\"https://www.youtube.com/embed/sQ7Nw3o0izc?si=3HpNqIClrc2ncr7Y\" title=\"Intro to CI/CD webinar\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n## How to get started with CI/CD\n\nGetting started with CI/CD begins with identifying a simple but representative project to serve as your pilot. Choose a straightforward application with basic testing requirements, as this allows you to focus on learning the pipeline mechanics rather than dealing with complex deployment scenarios. Begin by ensuring your code is in [version control](https://about.gitlab.com/topics/version-control/) and has some [basic automated tests](https://about.gitlab.com/blog/develop-c-unit-testing-with-catch2-junit-and-gitlab-ci/) — even a few unit tests will suffice. The goal is to [create a minimal pipeline](https://about.gitlab.com/blog/how-to-learn-ci-cd-fast/) that you can gradually enhance as your understanding grows.\n\nFor GitLab specifically, the process starts with creating a `.gitlab-ci.yml` file in your project's root directory. This YAML file defines your pipeline stages (basic ones like build, test, and deploy) and jobs. A simple pipeline might look like this: The build stage compiles your code and creates artifacts, the test stage runs your unit tests, and the deploy stage pushes your application to a staging environment. GitLab will automatically detect this file and start running your pipeline whenever changes are pushed to your repository. The platform provides [built-in runners](https://docs.gitlab.com/runner/) to execute your pipeline jobs, though you can also set up your own runners for more control.\n\nAs you become comfortable with the basics, gradually add more sophisticated elements to your pipeline. 
This might include adding code quality checks, [security scanning](https://docs.gitlab.com/ee/user/application_security/#security-scanning), or automated deployment to production. GitLab's DevSecOps platform includes features like [compliance management](https://about.gitlab.com/blog/meet-regulatory-standards-with-gitlab/), [deployment variables](https://about.gitlab.com/blog/demystifying-ci-cd-variables/), and manual approval gates that you can incorporate as your pipeline matures. Pay attention to pipeline execution time and look for opportunities to run jobs in parallel where possible. Remember to add proper error handling and notifications so team members are promptly alerted of any pipeline failures. Start documenting common issues and solutions as you encounter them — this will become invaluable as your team grows.\n\n> ### Want to learn more about getting started with CI/CD? Register for a [free CI/CD course on GitLab University](https://university.gitlab.com/courses/continuous-integration-and-delivery-ci-cd-with-gitlab).\n\n## Security, compliance, and CI/CD\n\nOne of the greatest advantages of CI/CD is the ability to embed security and compliance checks early and often in the software development lifecycle. In GitLab, teams can use the `.gitlab-ci.yml` configuration to automatically trigger security scans at multiple stages, from initial code commit to production deployment. The platform's container scanning, dependency scanning, and security scanning capabilities ([Dynamic Application Security Testing](https://docs.gitlab.com/ee/user/application_security/dast/) and [Advanced SAST](https://about.gitlab.com/blog/gitlab-advanced-sast-is-now-generally-available/)) can be configured to run automatically with each code change, checking for vulnerabilities, compliance violations, and security misconfigurations. 
The platform's API enables integration with [external security tools](https://about.gitlab.com/blog/integrate-external-security-scanners-into-your-devsecops-workflow/), while the test coverage features ensure security tests meet required thresholds.\n\nGitLab's security test reports provide detailed information about findings, enabling quick remediation of security issues before they reach production. The Security Dashboard provides a centralized view of vulnerabilities across projects, while [security policies can be enforced](https://about.gitlab.com/blog/how-gitlab-supports-the-nsa-and-cisa-cicd-security-guidance/) through merge request approvals and pipeline gates. In addition, GitLab provides multiple layers of secrets management to protect sensitive information throughout the CI/CD process, audit logs to track access to secrets, and role-based access control (RBAC) to ensure only authorized users can view or modify sensitive configuration data.\n\nGitLab also supports software bill of materials ([SBOM](https://about.gitlab.com/blog/the-ultimate-guide-to-sboms/)) generation, providing a comprehensive inventory of all software components, dependencies, and licenses in an application and enabling teams to quickly identify and respond to vulnerabilities and comply with regulatory mandates.\n\n## CI/CD and the cloud\n\nGitLab's CI/CD platform provides robust integration with major cloud providers including [Amazon Web Services](https://about.gitlab.com/partners/technology-partners/aws/), [Google Cloud Platform](https://about.gitlab.com/blog/provision-group-runners-with-google-cloud-platform-and-gitlab-ci/), and [Microsoft Azure](https://docs.gitlab.com/ee/install/azure/), enabling teams to automate their cloud deployments directly from their pipelines. Through GitLab's cloud integrations, teams can manage cloud resources, deploy applications, and monitor cloud services all within the GitLab interface. 
The platform's built-in cloud deployment templates and [Auto DevOps](https://docs.gitlab.com/ee/topics/autodevops/) features significantly reduce the complexity of cloud deployments, allowing teams to focus on application development rather than infrastructure management. For organizations that want to automate their IT   infrastructure using GitOps, GitLab has a [Flux CD integration](https://about.gitlab.com/blog/why-did-we-choose-to-integrate-fluxcd-with-gitlab/).\n\nGitLab's cloud capabilities extend beyond basic deployment automation. The platform's [Kubernetes integration](https://about.gitlab.com/blog/kubernetes-overview-operate-cluster-data-on-the-frontend/) enables teams to manage container orchestration across multiple cloud providers, while the [cloud native GitLab installation options](https://about.gitlab.com/topics/ci-cd/cloud-native-continuous-integration/) allow the platform itself to run in cloud environments. Through GitLab's cloud-native features, teams can implement auto-scaling runners that dynamically provision cloud resources for pipeline execution, optimizing costs and performance. The platform's integration with cloud provider security services ensures that security and compliance requirements are met throughout the deployment process.\n\nFor multi-cloud environments, GitLab provides consistent workflows and tooling regardless of the underlying cloud provider. Teams can use GitLab's environment management features to handle different cloud configurations across development, staging, and production environments. The platform's [infrastructure as code](https://docs.gitlab.com/ee/user/infrastructure/iac/) support, particularly its native integration with Terraform, enables teams to version control and automate their cloud infrastructure provisioning. 
GitLab's monitoring and observability features integrate with cloud provider metrics, providing comprehensive visibility into application and infrastructure health across cloud environments.\n\n## Advanced CI/CD \nCI/CD has evolved far beyond simple build and deploy pipelines. In advanced implementations, CI/CD involves sophisticated orchestration of automated testing, security scanning, infrastructure provisioning, AI, and more. Here are a few advanced CI/CD strategies that can help engineering teams scale their pipelines and troubleshoot issues even as architectural complexity grows.\n\n### Reuse and automation in CI/CD\n\nGitLab is transforming how development teams create and manage CI/CD pipelines with two major innovations: the [CI/CD Catalog](https://about.gitlab.com/blog/ci-cd-catalog-goes-ga-no-more-building-pipelines-from-scratch/) and [CI/CD steps](https://about.gitlab.com/blog/introducing-ci-cd-steps-a-programming-language-for-devsecops-automation/), a new programming language for DevSecOps automation currently in experimental phase. The CI/CD Catalog is a centralized platform where developers can discover, reuse, and contribute CI/CD components. Components function as reusable, single-purpose building blocks that simplify pipeline configuration — similar to Lego pieces for CI/CD workflows. Meanwhile, CI/CD steps support complex workflows by allowing developers to compose inputs and outputs for a CI/CD job. With the CI/CD Catalog and CI/CD steps, DevSecOps teams can easily standardize CI/CD and its components, simplifying the process of developing and maintaining CI/CD pipelines.\n\n> Learn more in our [CI/CD Catalog FAQ](https://about.gitlab.com/blog/faq-gitlab-ci-cd-catalog/) and [CI/CD steps documentation](https://docs.gitlab.com/ee/ci/steps/).\n\n### Troubleshooting pipelines with AI\n\nWhile CI/CD pipelines can and do break, troubleshooting the issue quickly can minimize the impact. 
GitLab Duo Root Cause Analysis, part of a suite of AI-powered features, removes the guesswork by [determining the root cause for a failed CI/CD pipeline](https://about.gitlab.com/blog/quickly-resolve-broken-ci-cd-pipelines-with-ai/). When a pipeline fails, GitLab provides detailed job logs, error messages, and execution traces that show exactly where and why the failure occurred. Root Cause Analysis then uses AI to suggest a fix.\nWatch GitLab Duo Root Cause Analysis in action:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n\u003Ciframe src=\"https://www.youtube.com/embed/sTpSLwX5DIs?si=J6-0Bf6PtYjrHX1K\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n## How to migrate to GitLab CI/CD\n\nMigrating to the DevSecOps platform and its built-in CI/CD involves a systematic approach of analyzing your existing pipeline configurations, dependencies, and deployment processes to map them to GitLab's equivalent features and syntax. Use these guides to help make the move.\n\n* [How to migrate from Bamboo to GitLab CI/CD](https://about.gitlab.com/blog/migrating-from-bamboo-to-gitlab-cicd/)\n* [Jenkins to GitLab: The ultimate guide to modernizing your CI/CD environment](https://about.gitlab.com/blog/jenkins-gitlab-ultimate-guide-to-modernizing-cicd-environment/)\n* [GitHub to GitLab migration the easy way](https://about.gitlab.com/blog/github-to-gitlab-migration-made-easy/)\n\n## Lessons from leading organizations\n\nThese leading organizations migrated to GitLab and are enjoying the myriad benefits of CI/CD. 
Read their stories.\n\n- [Lockheed Martin](https://about.gitlab.com/customers/lockheed-martin/)\n- [Indeed](https://about.gitlab.com/blog/how-indeed-transformed-its-ci-platform-with-gitlab/)\n- [CARFAX](https://about.gitlab.com/customers/carfax/)\n- [HackerOne](https://about.gitlab.com/customers/hackerone/)\n- [Betstudios](https://about.gitlab.com/blog/betstudios-cto-on-improving-ci-cd-capabilities-with-gitlab-premium/)\n- [Thales and Carrefour](https://about.gitlab.com/blog/how-carrefour-and-thales-are-evolving-their-ci-cd-platforms/)\n\n## CI/CD tutorials\n\nBecome a CI/CD expert with these easy-to-follow tutorials.\n\n* [Basics of CI: How to run jobs sequentially, in parallel, or out of order](https://about.gitlab.com/blog/basics-of-gitlab-ci-updated/)\n* [How to set up your first GitLab CI/CD component](https://about.gitlab.com/blog/tutorial-how-to-set-up-your-first-gitlab-ci-cd-component/)\n* [Building a GitLab CI/CD pipeline for a monorepo the easy way](https://about.gitlab.com/blog/building-a-gitlab-ci-cd-pipeline-for-a-monorepo-the-easy-way/)\n* [Using child pipelines to continuously deploy to five environments](https://about.gitlab.com/blog/using-child-pipelines-to-continuously-deploy-to-five-environments/)\n* [CI/CD automation: Maximize 'deploy freeze' impact across GitLab groups](https://about.gitlab.com/blog/ci-cd-automation-maximize-deploy-freeze-impact-across-gitlab-groups/)\n* [Refactoring a CI/CD template to a CI/CD component](https://about.gitlab.com/blog/refactoring-a-ci-cd-template-to-a-ci-cd-component/)\n* [Annotate container images with build provenance using Cosign in GitLab CI/CD](https://about.gitlab.com/blog/annotate-container-images-with-build-provenance-using-cosign-in-gitlab-ci-cd)\n\n> #### Get started with GitLab CI/CD. 
[Sign up for GitLab Ultimate](https://about.gitlab.com/free-trial/devsecops/) and try the AI-powered DevSecOps platform free for 60 days.",[109,709,475,827,767,757],{"slug":957,"featured":91,"template":788},"ultimate-guide-to-ci-cd-fundamentals-to-advanced-implementation",{"content":959,"config":969},{"title":960,"description":961,"authors":962,"heroImage":963,"date":964,"body":965,"category":713,"tags":966},"Develop C++ unit testing with Catch2, JUnit, and GitLab CI","Learn how to set up, write, and automate C++ unit tests using Catch2 with GitLab CI/CD. See examples from a working air quality app project and AI-powered help from GitLab Duo.",[897],"https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659684/Blog/Hero%20Images/AdobeStock_479904468__1_.jpg","2024-07-02","Continuous integration (CI) and automated testing are important DevSecOps workflows for software developers to detect bugs early, improve code quality, and streamline their development processes. \n\nIn this tutorial, you'll learn how to set up unit testing on a `C++` project with [Catch2](https://github.com/catchorg/Catch2) and GitLab CI for continuous integration. You'll also see how the AI-powered features of [GitLab Duo](https://about.gitlab.com/gitlab-duo/) can help. We’ll use [an air quality monitoring application](https://gitlab.com/gitlab-da/use-cases/ai/ai-applications/air-quality-app) as our reference project.\n\n## Prerequisites\n\n- Ensure you have [CMake](https://cmake.org/ \"CMake\") installed on your machine. \n- A modern `C++` compiler such as GCC or Clang is required. \n- An API key from [OpenWeatherMap](https://openweathermap.org/api) - requires signing up for a free account (1,000/calls per day are included for free). 
\n\n## Set up the application for testing\n\nThe reference project we’ll be using for demonstrating testing in this blog post is an air quality monitoring application that fetches air quality data from the OpenWeatherMap API based on the U.S zip codes only provided by the user.\n\nHere are the steps to set up the application for testing:\n\n1. Fork the [the reference project](https://gitlab.com/gitlab-da/use-cases/ai/ai-applications/air-quality-app) and clone the fork to your local environment.\n\n2. Generate an API key from  [OpenWeatherMap](https://openweathermap.org/) and export it into the environment. \n\n```shell\nexport API_KEY=\"YOURAPIKEY_HERE\"\n```\n\n3. Alternatively, you can add the key into your `.env` configuration, and source it with `source ~/.env`, or use a different mechanism to populate the environment.\n\n4. Compile and build the project code with the following instructions:\n\n```cpp\ncmake -S . -B build\ncmake --build build\n```\n\n5. Run the application using the executable and passing in a U.S zip code (90210 as an example): \n\n```cpp\n./build/air_quality_app 90210\n```\n\nHere’s an example of what running the program will look like in your terminal:  \n\n```bash\n❯ ./build/air_quality_app 90210\nAir Quality Index (AQI) for Zip Code 90210: 2 (Fair)\n```\n\n## Install Catch2\n\nNow that the application is set up and working, let's start working on adding testing using Catch2. Catch2 is a modern, `C++-native` testing framework for unit tests. \n\nYou can also ask GitLab Duo Chat within your IDE for an introduction to getting started with Catch2 as a `C++` testing framework. GitLab Duo Chat will provide getting started steps as well as an example test: \n\n![GitLab Duo Chat starting steps and example test](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749676997/Blog/Content%20Images/1.duo-chat-installing-catch2.png)\n\n1. 
First navigate to your project’s root directory and create an externals folder using the `mkdir` command.\n\n```shell\nmkdir externals\n```\n\n2. There are several ways to install Catch2 via [its CMake integration](https://github.com/catchorg/Catch2/blob/devel/docs/cmake-integration.md#top). We will use the option of installing it as a submodule and including it as part of the source code to simplify dependency management. To add Catch2 to your project in the `externals` folder: \n\n```shell\ngit submodule add https://github.com/catchorg/Catch2.git externals/Catch2\ngit submodule update --init --recursive\n```\n\n3. Update `CMakeLists.txt` to include Catch2’s directory as a subdirectory. This allows CMake to find and build Catch2 as a part of our project. \n\n```cpp\n# Assuming Catch2 in externals/Catch2\nadd_subdirectory(externals/Catch2)\n```\n\n4. Create a `tests.cpp` file in your project root to write our tests to: \n\n```shell\ntouch tests.cpp\n```\n\n5. Update `CMakeLists.txt` Link against Catch2. When defining your test executable in CMake, link it against Catch2:\n\n```cpp\n# Add tests executable and link it to Catch2\nadd_executable(tests test.cpp)\ntarget_link_libraries(tests PRIVATE Catch2::Catch2WithMain)\n```\n\n## Structure the project for testing\n\nBefore we start writing our tests, we should separate our application logic into separate files in order to maintain and test our code more efficiently. At the end of this section we should have:\n\n```\nmain.cpp containing only the main() function and application setup\nincludes/functions.cpp containing all functional code such as API calls and data processing: \nincludes/functions.h containing the declarations for the functions defined in functions.cpp.  It needs to define the preprocessor macro guards, and include all necessary headers. \n```\n\nApply the following changes to the files: \n\n1. 
`main.cpp`\n\n```cpp\n#include \u003Ciostream>\n#include \"functions.h\"\n\nint main(int argc, char* argv[]) {\n   if (argc \u003C 2) {\n       std::cerr \u003C\u003C \"Usage: \" \u003C\u003C argv[0] \u003C\u003C \" \u003CZip Code>\" \u003C\u003C std::endl;\n       return 1;\n   }\n\n   std::string zipCode = argv[1];\n   std::string apiKey = getApiKey();\n   if (apiKey.empty()) {\n       std::cerr \u003C\u003C \"API key not found.\" \u003C\u003C std::endl;\n       return 1;\n   }\n\n   auto [lat, lon] = geocodeZipcode(zipCode, apiKey);\n   if (lat == 0 && lon == 0) {\n       std::cerr \u003C\u003C \"Failed to geocode zipcode.\" \u003C\u003C std::endl;\n       return 1;\n   }\n\n   std::string response = fetchAirQuality(lat, lon, apiKey);\n   std::string airQualityInfo = parseAirQualityResponse(response);\n\n   std::cout \u003C\u003C \"Air Quality Index for Zip Code \" \u003C\u003C zipCode \u003C\u003C \": \" \u003C\u003C airQualityInfo \u003C\u003C std::endl;\n\n   return 0;\n}\n```\n\n2. Create a `functions.h:` in the `includes` folder: \n\n```cpp\n#ifndef FUNCTIONS_H\n#define FUNCTIONS_H\n\n#include \u003Cstring>\n#include \u003Cutility>\n#include \u003Cvector>\n\n// Declare the function prototype\nstd::string httpRequest(const std::string& url);\nbool loadEnvFile(const std::string& filename);\nstd::string getApiKey();\nstd::pair\u003Cdouble, double> geocodeZipcode(const std::string& zipCode, const std::string& apiKey);\nstd::string fetchAirQuality(double lat, double lon, const std::string& apiKey);\nstd::string parseAirQualityResponse(const std::string& response);\n\n#endif\n```\n\n3. 
Create a `functions.cpp` in the `includes` folder: \n\n```cpp\n#include \"functions.h\"\n#include \u003Cfstream>\n#include \u003Celnormous/HTTPRequest.hpp>\n#include \u003Cnlohmann/json.hpp>\n#include \u003Ciostream>\n#include \u003Ccstdlib> // For getenv\n\nstd::string httpRequest(const std::string& url) {\n   try {\n       http::Request request{url};\n       const auto response = request.send(\"GET\");\n       return std::string{response.body.begin(), response.body.end()};\n   } catch (const std::exception& e) {\n       std::cerr \u003C\u003C \"Request failed, error: \" \u003C\u003C e.what() \u003C\u003C std::endl;\n       return \"\";\n   }\n}\nstd::string getApiKey() {\n   const char* envApiKey = std::getenv(\"API_KEY\");\n   if (envApiKey) {\n       return std::string(envApiKey);\n   }\n   // If the environment variable is not set, fallback to the config file\n   std::ifstream configFile(\"config.txt\");\n   std::string line;\n   if (getline(configFile, line)) {\n       return line.substr(line.find('=') + 1);\n   }\n   return \"\";\n}\n\nstd::pair\u003Cdouble, double> geocodeZipcode(const std::string& zipCode, const std::string& apiKey) {\n   std::string url = \"http://api.openweathermap.org/geo/1.0/zip?zip=\" + zipCode + \",US&appid=\" + apiKey;\n   std::string response = httpRequest(url);\n   try {\n       auto json = nlohmann::json::parse(response);\n       if (json.contains(\"lat\") && json.contains(\"lon\")) {\n           double lat = json[\"lat\"];\n           double lon = json[\"lon\"];\n           return {lat, lon};\n       } else {\n           std::cerr \u003C\u003C \"Geocode response missing 'lat' or 'lon' fields: \" \u003C\u003C response \u003C\u003C std::endl;\n       }\n   } catch (const nlohmann::json::parse_error& e) {\n       std::cerr \u003C\u003C \"Failed to parse geocode response: \" \u003C\u003C e.what() \u003C\u003C \" - Response: \" \u003C\u003C response \u003C\u003C std::endl;\n   }\n   return {0, 0};\n}\n\nstd::string 
fetchAirQuality(double lat, double lon, const std::string& apiKey) {\n   std::string url = \"http://api.openweathermap.org/data/2.5/air_pollution?lat=\" + std::to_string(lat) + \"&lon=\" + std::to_string(lon) + \"&appid=\" + apiKey;\n   std::string response = httpRequest(url);\n   return response;\n}\n\nstd::string parseAirQualityResponse(const std::string& response) {\n   try {\n       auto json = nlohmann::json::parse(response);\n       if (json.contains(\"list\") && !json[\"list\"].empty() && json[\"list\"][0].contains(\"main\")) {\n           int aqi = json[\"list\"][0][\"main\"][\"aqi\"];\n           std::string aqiCategory;\n           switch (aqi) {\n               case 1:\n                   aqiCategory = \"Good\";\n                   break;\n               case 2:\n                   aqiCategory = \"Fair\";\n                   break;\n               case 3:\n                   aqiCategory = \"Moderate\";\n                   break;\n               case 4:\n                   aqiCategory = \"Poor\";\n                   break;\n               case 5:\n                   aqiCategory = \"Very Poor\";\n                   break;\n               default:\n                   aqiCategory = \"Unknown\";\n                   break;\n           }\n           return std::to_string(aqi) + \" (\" + aqiCategory + \")\";\n       } else {\n           return \"No AQI data available\";\n       }\n   } catch (const std::exception& e) {\n       std::cerr \u003C\u003C \"Failed to parse JSON response: \" \u003C\u003C e.what() \u003C\u003C std::endl;\n       return \"Error parsing AQI data\";\n   }\n}\n\n```\n\n4. 
Now that we have separated the source files, we also need to update our `CMakeLists.txt` to include `functions.cpp` in the `add_executable()` calls:\n\n```cpp\ncmake_minimum_required(VERSION 3.14)\nproject(air-quality-app)\n\n# Set the C++ standard for the project\nset(CMAKE_CXX_STANDARD 17)\nset(CMAKE_CXX_STANDARD_REQUIRED ON)\nset(CMAKE_CXX_EXTENSIONS OFF)\n\ninclude_directories(${CMAKE_SOURCE_DIR}/includes)\n\n# Define the main program executable\nadd_executable(air_quality_app main.cpp includes/functions.cpp)\n\n# Assuming Catch2 in externals/Catch2\nadd_subdirectory(externals/Catch2)\n\n# Add tests executable and link it to Catch2\nadd_executable(tests tests.cpp includes/functions.cpp)\ntarget_link_libraries(tests PRIVATE Catch2::Catch2WithMain)\n```\n\nTo verify that the changes are working, regenerate the CMake configuration and rebuild the source code with the following commands. The build will take longer now that we're compiling Catch2 files. \n\n```shell\nrm -rf build # delete existing build files\ncmake -S . -B build \ncmake --build build  \n```\n\nYou should be able to run the application without any errors.\n\n```shell\n./build/air_quality_app 90210\n```\n\n## Write tests in Catch2  \n\nCatch2 tests are made up of [macros and assertions](https://github.com/catchorg/Catch2/blob/devel/docs/assertions.md). Macros in Catch2 are used to define test cases and sections within those test cases. They help in organizing and structuring the tests. Assertions are used to verify that the code behaves as expected. If an assertion fails, the test case will fail, and Catch2 will report the failure.\n\nLet’s review a basic test scenario for an addition function to understand. Note: This test is read-only, as an example. 
\n\n```cpp\nint add(int a, int b) {\n   return a + b;\n}\n\nTEST_CASE(\"Addition works correctly\", \"[math]\") {\n   REQUIRE(add(1, 1) == 2);  // Test passes if 1+1 equals 2\n   REQUIRE(add(2, 2) != 5);  // Test passes if 2+2 does not equal 5\n}\n```\n\n- Each test begins with the `TEST_CASE` macro, which defines a test case container. The macro accepts two parameters: a string describing the test case and optionally a second string for tagging the test for easy filtering.\n- Tests are also composed of assertions, which are statements that check if conditions are true. Catch2 provides macros for assertion that include `REQUIRE`, which aborts the current test if the assertion fails, and `CHECK`, which logs the failure but continues with the current test.\n\n### Prepare to write tests with Catch2\n\nTo test the API retrieval functions in our air quality application, we’ll be using mock API requests. Mock API testing is a technique used to test how your application will interact with an external API without making any real API calls. Instead of sending requests to a live API server, we can simulate the responses using predefined data. Mock requests allow us to control the input data and specify exactly what the API would return for different requests, making sure that our tests aren't affected by changes in the real API responses or unexpected data. This also makes it easier for us to simulate and catch different failures.\n\nIn our `tests.cpp` file, let’s define the following function to run mock API requests.   
\n\n```cpp\n#include \"includes/functions.h\"\n#include \u003Ccatch2/catch_test_macros.hpp>\n#include \u003Cstring>\n\n// Mock HTTP request function that simulates API responses\nstd::string mockHttpRequest(const std::string& url) {\n   if (url.find(\"geo\") != std::string::npos) {\n       // Mock response for geocoding\n       return R\"({\"lat\": 40.7128, \"lon\": -74.0060})\"; \n   } else if (url.find(\"air_pollution\") != std::string::npos) {\n       // Mock response for air quality\n       return R\"({\"list\": [{\"main\": {\"aqi\": 2}}]})\";\n   }\n   // Default mock response for unmatched endpoints\n   return \"{}\";\n}\n// Overriding the actual httpRequest function with the mockHttpRequest for testing\nstd::string httpRequest(const std::string& url) {\n   return mockHttpRequest(url);\n}\n```\n\n- This function simulates HTTP requests and returns predefined JSON responses based on the URL given as input. \n- It also checks the URL to determine which type of data is being requested based on the functionality of the application (geocoding, air pollution, or forecast data). If the URL doesn’t match the expected endpoint, it returns an empty JSON object. \n\nDon't compile the code just yet, as you'll see a linker error. Since we're overriding the original `httpRequest` function with our mock function for testing, we'll need a preprocessor macro to enable conditional compilation - indicating which `httpRequest` function should run when we're compiling tests. \n\n#### Define a preprocessor macro for testing  \n\nBecause we’ve overridden `httpRequest` in our `tests.cpp`, we need to exclude that code from `functions.cpp` when we’re testing. When building tests, we may need to ensure that certain parts of our code behave differently or are excluded. 
We can do this by defining a preprocessor macro `TESTING` which enables conditional compilation, allowing us to selectively include or exclude code when compiling the test target:  \n\nWe define the `TESTING` macro in our `CMakeLists.txt` at the end:  \n\n```cpp\n# Define TESTING macro for this target\ntarget_compile_definitions(tests PRIVATE TESTING)\n```\n\nAnd add the macro wrapper in  `functions.cpp` around the original `httpRequest` function:  \n\n```cpp\n#ifndef TESTING  // Exclude this part when TESTING is defined\nstd::string httpRequest(const std::string& url) {\n   try {\n       http::Request request{url};\n       const auto response = request.send(\"GET\");\n       return std::string{response.body.begin(), response.body.end()};\n   } catch (const std::exception& e) {\n       std::cerr \u003C\u003C \"Request failed, error: \" \u003C\u003C e.what() \u003C\u003C std::endl;\n       return \"\";\n   }\n}\n#endif\n```\n\nRegenerate the CMake configuration and rebuild the source code to verify it works.\n\n```shell\ncmake --build build  \n```\n\n### Write the first tests \n\nNow, let’s write some tests for our air quality application.\n\n#### Test 1: Verify API key retrieval \n\nThis test ensures that the `getApiKey` function retrieves the API key correctly from the environment variable or the configuration file. Add the test case to our `tests.cpp`:\n\n```cpp\n\nTEST_CASE(\"API Key Retrieval\", \"[api]\") {\n   // Set the API_KEY environment variable for testing\n   setenv(\"API_KEY\", \"test_key\", 1);\n   // Test if the key is retrieved correctly\n   REQUIRE(getApiKey() == \"test_key\");\n}\n```\n\nYou can verify that this tests passes by rebuilding the code and running the tests:\n\n```shell\ncmake --build build\n./build/tests\n```\n\n#### Test 2: Geocode the zip code\n\nThis test ensures that the `geocodeZipcode` function returns the correct latitude and longitude for a given zip code using the mock API response function we set up earlier. 
The  `geocodeZipcode` function is supposed to hit an API that returns geographic coordinates based on a zip code. \n\nIn `tests.cpp`, add this test case for the zip code 90210: \n\n```cpp\nTEST_CASE(\"Geocode Zip code\", \"[geocode]\") {\n   std::string apiKey = \"test_key\";\n   std::pair\u003Cdouble, double> coordinates = geocodeZipcode(\"90210\", apiKey);\n   // Check latitude\n   REQUIRE(coordinates.first == 40.7128);\n   // Check longitude \n   REQUIRE(coordinates.second == -74.0060);\n}\n```\n\nThe purpose of this test is to verify that the function `geocodeZipcode` can correctly parse the latitude and longitude from the API response. By hardcoding the expected response, we ensure that the test environment is controlled and predictable.\n\n #### Test 3: Air quality API test\n\nThis test ensures that the `fetchAirQuality` function correctly fetches air quality data using the mock API response function we set up earlier. It verifies that the function constructs the API request properly, sends it, and accurately parses the air quality index (AQI) from the mock JSON response. This validation helps ensure that the overall process of fetching and interpreting air quality data works as intended.\n\n```cpp\nTEST_CASE(\"Fetch Air Quality\", \"[airquality]\") {\n   std::string apiKey = \"test_key\";\n   double lat = 40.7128;\n   double lon = -74.0060;\n   std::string response = fetchAirQuality(lat, lon, apiKey);\n   // Check the response\n   REQUIRE(response == R\"({\"list\": [{\"main\": {\"aqi\": 2}}]})\");\n}\n```\n\n## Build and run the tests\n\nTo  build and compile our application, we'll use the same CMake commands as before:\n\n```cpp\ncmake -S . 
-B build\ncmake --build build\n\n```\n\nAfter building, we can run our tests by executing the test binary:  \n\n```cpp\n./build/tests\n\n```\n\nRunning this command will execute all defined tests, and you will see output indicating whether each test has passed or failed.\n\n![Output showing pass/fail of tests](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749676998/Blog/Content%20Images/2.running-catch2-tests.png)\n\n## Set up GitLab CI/CD\n\nTo automate the testing process each time we push some new code to our repository, let’s set up [GitLab CI/CD](https://about.gitlab.com/topics/ci-cd/). Create a new `.gitlab-ci.yml` configuration file in the root directory. \n\n```yaml\nimage: gcc:latest\n\nvariables:\n GIT_SUBMODULE_STRATEGY: recursive\n\nstages:\n - build\n - test\n\nbefore_script:\n - apt-get update && apt-get install -y cmake\n\ncompile:\n stage: build\n script:\n   - cmake -S . -B build\n   - cmake --build build\n artifacts:\n   paths:\n     - build/\n\ntest:\n stage: test\n script:\n   - ./build/tests --reporter junit -o test-results.xml\n artifacts:\n   reports:\n     junit: test-results.xml\n```\n\nThis CI/CD configuration will compile both the main application and the test suite, then run the tests, generating a JUnit XML report which GitLab uses to display the test results.  \n\n- In `before_script`, we added an installation for `cmake`, and `git submodule sync --recursive` which initializes and updates our submodules (catch2). \n- In the `test` stage, `--reporter junit -o test-results.xml` specifies that the test results should be treated as a JUnit report which allows GitLab CI to display results in the UI. This is super helpful when you have several tests in your application.  
\n\nWe also need to [add an environment variable](https://docs.gitlab.com/ee/ci/variables/#define-a-cicd-variable-in-the-ui) with the `API_KEY` in project settings on GitLab.\n\nDon’t forget to add all new files to Git, and commit and push the changes in a new MR:\n\n```shell\ngit checkout -b tests-catch2-cicd\n\ngit add includes/functions.{h,cpp} tests.cpp .gitlab-ci.yml \ngit add CMakeLists.txt main.cpp \n\ngit commit -vm \"Add Catch2 tests and CI/CD configuration\"\ngit push \n```\n\n## View the test report\n\nAfter pushing our code changes, we can review the results of our tests in the GitLab UI in the Pipeline view in the `Tests` tab:\n\n![GitLab pipeline view shows test results](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749676998/Blog/Content%20Images/2.0-passed-tests-UI.png)\n\n## Simulate a test failure\n\nTo demonstrate how our UI will handle test failures, we can intentionally introduce a bug into our code and observe the resulting behavior. \n\nLet's modify our `parseAirQualityResponse` function to introduce an error. We can change the AQI category for an AQI value of 2 from \"Fair\" to \"Poor.\" This change will cause the related test to fail, allowing us to see the test failure in the GitLab UI.\n\nIn `functions.cpp`, find the `parseAirQualityResponse` function and modify the switch statement for case `2` to set the `Poor` value instead of `Fair`:\n\n```cpp\n               // Intentional bug:\n               case 2:\n                   aqiCategory = \"Poor\";\n                   break;\n```\n\nIn `tests.cpp`, add a new test case that directly checks the output of the `parseAirQualityResponse` function. This test ensures that the `parseAirQualityResponse` function correctly parses and categorizes the air quality data from the mock API response. 
This function takes a JSON response, extracts the AQI value, and translates it into a human-readable category.\n\n```cpp\n\nTEST_CASE(\"Parse Air Quality Response\", \"[airquality]\") {\n   std::string mockResponse = R\"({\"list\": [{\"main\": {\"aqi\": 2}}]})\";\n   std::string result = parseAirQualityResponse(mockResponse);\n   // This should fail due to the intentional bug\n   REQUIRE(result == \"2 (Fair)\");\n}\n\n```\n\nCommit the changes, and push them into the MR. Open the MR in your browser. \n\nBy introducing an intentional bug in this function, we can see how a test failure is reported in GitLab's pipelines UI. We must add, commit, and push the changes to our repository to view the test failure in the pipeline. \n\n![Simulated test failure](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749676998/Blog/Content%20Images/2.1-failed-test-simulation.png)\n\n![Details of the simulated failed test](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749676998/Blog/Content%20Images/2.2-failed-test-simulation-details.png)\n\nOnce we've verified this simulated test failure, we can use `git revert` to roll back that commit. \n\n```shell\ngit revert HEAD\n```\n\n## Add and test a new feature\n\nLet’s put what you've learned together by creating a new feature in the air quality application and then writing a test for that feature using Catch2. The new feature will fetch the current weather forecast for the provided zip code.\n\nFirst, we'll define a `Weather` struct and add the function prototype in our `functions.h` file (before the closing `#endif`):\n\n```cpp\n\nstruct Weather {\n   std::string main;\n   std::string description;\n   double temperature;\n};\n\nWeather getCurrentWeather(const std::string& apiKey, double lat, double lon);\n```\n\nThen, we implement the `getCurrentWeather` function in `functions.cpp`. This function calls the OpenWeatherMap API to retrieve the current weather and parses the JSON response. 
This code was generated using [GitLab Duo](https://about.gitlab.com/gitlab-duo/). If you start typing `Weather getCurrentWeather(const std::string& apiKey, double lat, double lon) {` to complete the function, GitLab Duo will provide the function contents for you, line by line. \n\n![GitLab Duo completing the function contents](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749676998/Blog/Content%20Images/3.get-current-weather-function-completion.png)\n\nHere's what your `getCurrentWeather()` function can look like: \n\n```cpp\n\nWeather getCurrentWeather(const std::string& apiKey, double lat, double lon) {\n   std::string url = \"http://api.openweathermap.org/data/2.5/weather?lat=\" + std::to_string(lat) + \"&lon=\" + std::to_string(lon) + \"&appid=\" + apiKey;\n   std::string response = httpRequest(url);\n   auto json = nlohmann::json::parse(response);\n   Weather weather;\n   if (!json.is_null()) {\n       weather.main = json[\"weather\"][0][\"main\"];\n       weather.description = json[\"weather\"][0][\"description\"];\n       weather.temperature = json[\"main\"][\"temp\"];\n   }\n   return weather;\n}\n```\n\nAnd, finally, we update our `main.cpp` file in the main function to output the current forecast (and converting Kelvin to Celsius for the output):  \n\n```cpp\n   Weather currentWeather = getCurrentWeather(apiKey, lat, lon);\n   if (currentWeather.main.empty()) {\n       std::cerr \u003C\u003C \"Failed to fetch current weather.\" \u003C\u003C std::endl;\n       return 1;\n   }\n\n   std::cout \u003C\u003C \"Current Weather: \" \u003C\u003C currentWeather.main \u003C\u003C \", \" \u003C\u003C currentWeather.description\n       \u003C\u003C \", temperature \" \u003C\u003C currentWeather.temperature - 273.15 \u003C\u003C \" °C\" \u003C\u003C std::endl;\n```\n\nWe can confirm that our new feature is working by building and running the application:  \n\n```shell\ncmake --build build\n./build/air_quality_app \n```\n\nAnd we should see the following 
output or similar in case the weather is different on the day the code is run :)\n\n```\nAir Quality Index for Zip Code 90210: 2 (Poor)\nCurrent Weather: Clouds, broken clouds, temperature 23.2 °C\n```\n\nWith all new functionality, there should be testing! We can also write a test to check whether the application is fetching and parsing a weather forecast correctly. This test checks that the function returns a list containing the correct number of forecast entries and that each entry has accurate data regarding time and temperature.\n\n```cpp\nTEST_CASE(\"Current Weather functionality\", \"[api]\") {\n   auto weather = getCurrentWeather(\"dummyApiKey\", 40.7128, -74.0060);\n   // Ensure main weather description is not empty\n   REQUIRE_FALSE(weather.main.empty());\n   // Validate that temperature is a reasonable value\n   REQUIRE(weather.temperature > 0); \n}\n```\n\nWe’ll also have to update our `mockHTTPRequest` function in `tests.cpp` to account for this new test. Modify the if-condition with a new else-if branch checking for the `weather` string in the URL:  \n\n```cpp\n// Mock HTTP request function that simulates API responses\nstd::string mockHttpRequest(const std::string &url)\n{\n   if (url.find(\"geo\") != std::string::npos)\n   {\n       // Mock response for geocoding\n       return R\"({\"lat\": 40.7128, \"lon\": -74.0060})\";\n   }\n   else if (url.find(\"air_pollution\") != std::string::npos)\n   {\n       // Mock response for air quality\n       return R\"({\"list\": [{\"main\": {\"aqi\": 2}}]})\";\n   }\n   else if (url.find(\"weather\") != std::string::npos)\n   {\n       // Mock response for current weather\n       return R\"({\n          \"weather\": [{\"main\": \"Clear\", \"description\": \"clear sky\"}],\n          \"main\": {\"temp\": 298.55}\n      })\";\n   }\n   return \"{}\";\n}\n```\n\nAnd verify that our tests are working by rebuilding and running our tests:  \n\n```shell\ncmake --build build \n./build/tests\n```\n\nAll tests should 
pass, including the new one for Current Weather Functionality. \n\n## Optimize tests.cpp with sections\n\nTo better organize our tests as the project grows and categorize each functionality, we can use Catch2’s `SECTION` macro. The `SECTION` macro allows you to define logically separate test scenarios within a single test case, providing a clean way to test different behaviors or conditions without requiring multiple separate test cases or multiple files. This approach keeps related tests bundled together and also improves test maintainability by allowing shared setup code to be executed repeatedly for each section.\n\nSince some of our functionality is preprocessing data to retrieve information, let’s section our tests as such:\n- preprocessing steps: \n\t- API key validation\n\t- geocoding validation\n-  API data retrieval:\n\t- air pollution retrieval \n\t- forecast retrieval\n\nHere’s what our `tests.cpp` will look like if organized by sections: \n\n```cpp\n#include \"functions.h\"\n#include \u003Ccatch2/catch_test_macros.hpp>\n#include \u003Cstring>\n\n// Mock HTTP request function that simulates API responses\nstd::string mockHttpRequest(const std::string &url)\n{\n   if (url.find(\"geo\") != std::string::npos)\n   {\n       // Mock response for geocoding\n       return R\"({\"lat\": 40.7128, \"lon\": -74.0060})\";\n   }\n   else if (url.find(\"air_pollution\") != std::string::npos)\n   {\n       // Mock response for air quality\n       return R\"({\"list\": [{\"main\": {\"aqi\": 2}}]})\";\n   }\n   else if (url.find(\"weather\") != std::string::npos)\n   {\n       // Mock response for current weather\n       return R\"({\n          \"weather\": [{\"main\": \"Clear\", \"description\": \"clear sky\"}],\n          \"main\": {\"temp\": 298.55}\n      })\";\n   }\n   return \"{}\";\n}\n\n// Overriding the actual httpRequest function with the mockHttpRequest for testing\nstd::string httpRequest(const std::string &url)\n{\n   return mockHttpRequest(url);\n}\n\n// 
Preprocessing Steps\nTEST_CASE(\"Preprocessing Steps\", \"[preprocessing]\") {\n   SECTION(\"API Key Retrieval\") {\n       // Set the API_KEY environment variable for testing\n       setenv(\"API_KEY\", \"test_key\", 1);\n       // Test if the key is retrieved correctly\n       REQUIRE_FALSE(getApiKey().empty());\n   }\n\n   SECTION(\"Geocode Functionality\") {\n       std::string apiKey = \"test_key\";\n       std::pair\u003Cdouble, double> coordinates = geocodeZipcode(\"90210\", apiKey);\n       // Check latitude\n       REQUIRE(coordinates.first == 40.7128);\n       // Check longitude \n       REQUIRE(coordinates.second == -74.0060);\n   }\n}\n\n// API Data Retrieval\nTEST_CASE(\"API Data Retrieval\", \"[data_retrieval]\") {\n   SECTION(\"Air Quality Functionality\") {\n       std::string apiKey = \"test_key\";\n       double lat = 40.7128;\n       double lon = -74.0060;\n       std::string response = fetchAirQuality(lat, lon, apiKey);\n       // Check the response\n       REQUIRE(response == R\"({\"list\": [{\"main\": {\"aqi\": 2}}]})\");\n   }\n\n   SECTION(\"Current Weather Functionality\") {\n       auto weather = getCurrentWeather(\"dummyApiKey\", 40.7128, -74.0060);\n       // Ensure main weather description is not empty\n       REQUIRE_FALSE(weather.main.empty());\n       // Validate that temperature is a reasonable value\n       REQUIRE(weather.temperature > 0);\n   }\n}\n```\n\nRebuild the code and run the tests again to verify.\n\n```shell\ncmake --build build \n./build/tests\n```\n\n## Next steps\n\nIn this post, we covered how to integrate unit testing into a `C++` project using Catch2 testing framework and GitLab CI/CD and set up basic tests for our reference air quality application project.\n\nTo explore these concepts further, you can check out the [Catch2 documentation](https://github.com/catchorg/Catch2) and [GitLab's Unit test report examples documentation](https://docs.gitlab.com/ee/ci/testing/unit_test_report_examples.html). 
\n\nFor an advanced async exercise, you could build upon this project by using GitLab Duo to implement a feature that retrieves and analyzes historical air quality data and add code quality checks into the CI/CD pipeline. Happy coding! \n",[827,967,968,675,709],"testing","CI",{"slug":970,"featured":91,"template":788},"develop-c-unit-testing-with-catch2-junit-and-gitlab-ci",{"category":721,"slug":725,"posts":972},[973,987,999],{"content":974,"config":985},{"title":975,"description":976,"authors":977,"heroImage":980,"date":981,"body":982,"category":725,"tags":983},"How we decreased GitLab repo backup times from 48 hours to 41 minutes","Learn how we tracked a performance bottleneck to a 15-year-old Git function and fixed it, leading to enhanced efficiency that supports more robust backup strategies and can reduce risk.",[978,979],"Karthik Nayak","Manuel Kraft","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097166/Blog/Hero%20Images/Blog/Hero%20Images/REFERENCE%20-%20display%20preview%20for%20blog%20images%20%282%29_2pKf8RsKzAaThmQfqHIaa7_1750097166565.png","2025-06-05","Repository backups are a critical component of any robust disaster recovery strategy. However, as repositories grow in size, the process of creating reliable backups becomes increasingly challenging.  Our own [Rails repository](https://gitlab.com/gitlab-org/gitlab) was taking 48 hours to back up — forcing impossible choices between backup frequency and system performance. We wanted to tackle this issue for our customers and for our own users internally. \n\nUltimately, we traced the issue to a 15-year-old Git function with O(N²) complexity and fixed it with an algorithmic change, __reducing backup times exponentially__. The result: lower costs, reduced risk, and backup strategies that actually scale with your codebase.\n\nThis turned out to be a Git scalability issue that affects anyone with large repositories. Here's how we tracked it down and fixed it. 
\n\n## Backup at scale\n\nFirst, let's look at the problem. As organizations scale their repositories and backups grow more complex, here are some of the challenges they can face:\n\n* **Time-prohibitive backups:** For very large repositories, creating a repository backup could take several hours, which can hinder the ability to schedule regular backups. \n* **Resource intensity:** Extended backup processes can consume substantial server resources, potentially impacting other operations.\n* **Backup windows:** Finding adequate maintenance windows for such lengthy processes can be difficult for teams running 24/7 operations.\n* **Increased failure risk:** Long-running processes are more susceptible to interruptions from network issues, server restarts, and system errors, which can force teams to restart the entire very long backup process from scratch.\n* **Race conditions:** Because it takes a long time to create a backup, the repository might have changed a lot during the process, potentially creating an invalid backup or interrupting the backup because objects are no longer available.\n\nThese challenges can lead to compromising on backup frequency or completeness – an unacceptable trade-off when it comes to data protection. Extended backup windows can force customers into workarounds. Some might adopt external tooling, while others might reduce backup frequency, resulting in potential inconsistent data protection strategies across organizations.\n\nNow, let's dig into how we identified a performance bottleneck, found a resolution, and deployed it to help cut backup times.\n\n## The technical challenge\n\nGitLab's repository backup functionality relies on the [`git bundle create`](https://git-scm.com/docs/git-bundle) command, which captures a complete snapshot of a repository, including all objects and references like branches and tags. 
This bundle serves as a restoration point for recreating the repository in its exact state.\n\nHowever, the implementation of the command suffered from poor scalability related to reference count, creating a performance bottleneck. As repositories accumulated more references, processing time increased exponentially. In our largest repositories containing millions of references, backup operations could extend beyond 48 hours.\n\n### Root cause analysis\n\nTo identify the root cause of this performance bottleneck, we analyzed a flame graph of the command during execution.\n\n![Flame graph showing command during execution](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097176/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097176388.jpg)\n\nA flame graph displays the execution path of a command through its stack trace. Each bar corresponds to a function in the code, with the bar's width indicating how much time the command spent executing within that particular function.\n\nWhen examining the flame graph of `git bundle create` running on a repository with 10,000 references, approximately 80% of the execution time is consumed by the `object_array_remove_duplicates()` function. This function was introduced to Git in the [commit b2a6d1c686](https://gitlab.com/gitlab-org/git/-/commit/b2a6d1c686) (bundle: allow the same ref to be given more than once, 2009-01-17).\n\nTo understand this change, it's important to know that `git bundle create` allows users to specify which references to include in the bundle. For complete repository bundles, the `--all` flag packages all references.\n\nThe commit addressed a problem where users providing duplicate references through the command line – such as `git bundle create main.bundle main main` - would create a bundle without properly handling the duplicated main reference. Unbundling this bundle in a Git repository would break, because it tries to write the same ref twice. 
The code to avoid duplication uses nested `for` loops that iterate through all references to identify duplicates. This O(N²) algorithm becomes a significant performance bottleneck in repositories with large reference counts, consuming substantial processing time.\n\n### The fix: From O(N²) to efficient mapping\n\nTo resolve this performance issue, we contributed an upstream fix to Git that replaces the nested loops with a map data structure. Each reference is added to the map, which automatically ensures only a single copy of each reference is retained for processing.\n\nThis change dramatically enhances the performance of `git bundle create` and enables much better scalability in repositories with large reference counts. Benchmark testing on a repository with 100,000 references demonstrates a 6x performance improvement.\n\n```shell\nBenchmark 1: bundle (refcount = 100000, revision = master)\n  Time (mean ± σ): \t14.653 s ±  0.203 s\t[User: 13.940 s, System: 0.762 s]\n  Range (min … max):   14.237 s … 14.920 s\t10 runs\n\nBenchmark 2: bundle (refcount = 100000, revision = HEAD)\n  Time (mean ± σ):  \t2.394 s ±  0.023 s\t[User: 1.684 s, System: 0.798 s]\n  Range (min … max):\t2.364 s …  2.425 s\t10 runs\n\nSummary\n  bundle (refcount = 100000, revision = HEAD) ran\n\t6.12 ± 0.10 times faster than bundle (refcount = 100000, revision = master)\n```\n\nThe patch was accepted and [merged](https://gitlab.com/gitlab-org/git/-/commit/bb74c0abbc31da35be52999569ea481ebd149d1d) into upstream Git. 
At GitLab, we backported this fix to ensure our customers could benefit immediately, without waiting for the next Git release.\n\n## The result: Dramatically decreased backup times\n\nThe performance gains from this improvement have been nothing short of transformative:\n\n* **From 48 hours to 41 minutes:** Creating a backup of our largest repository (`gitlab-org/gitlab`) now takes just 1.4% of the original time.\n* **Consistent performance:** The improvement scales reliably across repository sizes.\n* **Resource efficiency:** We significantly reduced server load during backup operations.\n* **Broader applicability:** While backup creation sees the most dramatic improvement, all bundle-based operations that operate on many references benefit.\n\n## What this means for GitLab customers\n\nFor GitLab customers, this enhancement delivers immediate and tangible benefits on how organizations approach repository backup and disaster recovery planning:\n* **Transformed backup strategies**   \n  * Enterprise teams can establish comprehensive nightly schedules without impacting development workflows or requiring extensive backup windows.   \n  * Backups can now run seamlessly in the background during nightly schedules, instead of needing to be dedicated and lengthy.  \n* **Enhanced business continuity**  \n  * With backup times reduced from days to minutes, organizations significantly minimize their recovery point objectives (RPO). This translates to reduced business risk – in a disaster scenario, you're potentially recovering hours of work instead of days.  \n* **Reduced operational overhead**   \n  * Less server resource consumption and shorter maintenance windows.  \n  * Shorter backup windows mean reduced compute costs, especially in cloud environments, where extended processing time translates directly to higher bills.  \n* **Future-proofed infrastructure**   \n  * Growing repositories no longer force difficult choices between backup frequency and system performance.   
\n  * As your codebase expands, your backup strategy can scale seamlessly alongside it\n\nOrganizations can now implement more robust backup strategies without compromising on performance or completeness. What was once a challenging trade-off has become a straightforward operational practice.\n\nStarting with the [GitLab 18.0](https://about.gitlab.com/releases/2025/05/15/gitlab-18-0-released/) release, all GitLab customers regardless of their license tier can already fully take advantage of these improvements for their [backup](https://docs.gitlab.com/administration/backup_restore/backup_gitlab/) strategy and execution. There is no further change in configuration required.\n\n## What's next\n\nThis breakthrough is part of our ongoing commitment to scalable, enterprise-grade Git infrastructure. While the improvement of 48 hours to 41 minutes for backup creation time represents a significant milestone, we continue to identify and address performance bottlenecks throughout our stack.\n\nWe're particularly proud that this enhancement was contributed upstream to the Git project, benefiting not just GitLab users but the broader Git community. This collaborative approach to development ensures that improvements are thoroughly reviewed, widely tested, and available to all.\n\n> Deep infrastructure work like this is how we approach performance at GitLab. Join the GitLab 18 virtual launch event to see what other fundamental improvements we're shipping. 
[Register today!](https://about.gitlab.com/eighteen/)",[984,903,757,943,475],"git",{"slug":986,"featured":91,"template":788},"how-we-decreased-gitlab-repo-backup-times-from-48-hours-to-41-minutes",{"content":988,"config":997},{"title":989,"description":990,"authors":991,"heroImage":898,"date":993,"body":994,"category":725,"tags":995},"Tutorial: Secure BigQuery data publishing with GitLab ","Learn how to create repeatable, auditable, and efficient processes for automating and securing BigQuery data exports.",[992],"Regnard Raquedan","2025-03-25","GitLab offers a powerful solution for automating and securing [BigQuery](https://cloud.google.com/bigquery) data exports. This integration transforms manual exports into repeatable, auditable processes that can eliminate security vulnerabilities while saving valuable time. This tutorial explains how to implement this solution so you can quickly reduce manual operations, permission issues, and security concerns with just a few lines of GitLab YAML code.\n\nFollow along with this step-by-step video:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/gxXX-ItAreo?si=FijY9wMVppCW-18q\" frameborder=\"0\" allowfullscreen=\"true\">\u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n## The solution architecture\n\nOur solution leverages GitLab CI/CD pipelines to automate the secure export of data from BigQuery to Google Cloud Storage. Here's the high-level architecture:\n\n1. SQL code is stored and version-controlled in GitLab.  \n2. After code review and approval, GitLab CI/CD pipeline executes the code.  \n3. The pipeline authenticates with Google Cloud.  \n4. SQL queries are executed against BigQuery.  \n5. Results are exported as CSV files to Google Cloud Storage.  \n6. 
Secure links to these files are provided for authorized consumption.\n\n## Prerequisites\n\nBefore we begin, ensure you have:\n\n* **Google Cloud APIs enabled:** BigQuery API and Cloud Storage API  \n* **Service account** with appropriate permissions:  \n  * BigQuery Job User  \n  * Storage Admin  \n  * **Note:** For this demo, we're using the service account approach for authentication, which is simpler to set up. For production environments, you might consider using GitLab's identity and access management integration with Google Cloud. This integration leverages Workload Identity Federation, which provides enhanced security and is more suitable for enterprise customers and organizations.  \n* **GitLab project** ready to store your SQL code and pipeline configuration\n\n## Step-by-step implementation\n\n**1. Configure Google Cloud credentials.**\n\nFirst, set up the necessary environment variables in your GitLab project:\n\n- Go to your **GitLab project > Settings > CI/CD**.  \n- Expand the **Variables** section.  \n- Add the following variables:  \n   * `CLOUD_STORAGE_BUCKET`: Your Google Cloud Storage bucket name  \n   * `PROJECT_ID`: Your Google Cloud project ID  \n   * `SERVICE_ACCOUNT_KEY`: Base64-encoded service account key (mark as masked)\n\n**2. Create your SQL query.**\n\nCreate a file named `test.sql` in your GitLab repository with your BigQuery SQL query. 
The query looks like this:\n\n```\n-- This query shows a list of the daily top Google Search terms.\nSELECT\n   refresh_date AS Day,\n   term AS Top_Term,\n       -- These search terms are in the top 25 in the US each day.\n   rank,\nFROM `bigquery-public-data.google_trends.top_terms`\nWHERE\n   rank = 1\n       -- Choose only the top term each day.\n   AND refresh_date >= DATE_SUB(CURRENT_DATE(), INTERVAL 2 WEEK)\n       -- Filter to the last 2 weeks.\nGROUP BY Day, Top_Term, rank\nORDER BY Day DESC\n   -- Show the days in reverse chronological order.\n\n```\n\nThis query gets the top 25 search terms from Google Trends for the current day.\n\n**3. Configure the GitLab CI/CD pipeline.**\n\nCreate a `.gitlab-ci.yml` file in your repository root:\n\n```\nimage: google/cloud-sdk:alpine\n\ninclude:\n  - template: Jobs/Secret-Detection.gitlab-ci.yml  # https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Jobs/Secret-Detection.gitlab-ci.yml\n\nexecute:\n  stage: deploy\n  script: \n    # Set up Google Cloud authentication and install necessary components\n    - export GOOGLE_CLOUD_CREDENTIALS=$(echo $SERVICE_ACCOUNT_KEY | base64 -d)\n    - echo $GOOGLE_CLOUD_CREDENTIALS > service-account-key.json \n    - gcloud auth activate-service-account --key-file service-account-key.json \n    - gcloud components install gsutil\n    # Set the active Google Cloud project\n    - gcloud config set project $PROJECT_ID\n    # Run the BigQuery query and export the results to a CSV file\n    - bq query --format=csv --use_legacy_sql=false \u003C test.sql > results.csv\n    # Create a Google Cloud Storage bucket if it doesn't exist\n    - gsutil ls gs://${CLOUD_STORAGE_BUCKET} || gsutil mb gs://${CLOUD_STORAGE_BUCKET}\n    # Upload the CSV file to the storage bucket\n    - gsutil cp results.csv gs://${CLOUD_STORAGE_BUCKET}/results.csv\n    # Set the access control list (ACL) to make the CSV file publicly readable\n    - gsutil acl ch -u AllUsers:R 
gs://${CLOUD_STORAGE_BUCKET}/results.csv\n    # Define the static URL for the CSV file\n    - export STATIC_URL=\"https://storage.googleapis.com/${CLOUD_STORAGE_BUCKET}/results.csv\"\n    # Display the static URL for the CSV file\n    - echo \"File URL = $STATIC_URL\"\n\n```\n\n**4. Run the pipeline.**\n\nNow, whenever changes are merged to your main branch, the pipeline will provide a link to the CSV file stored on the Google Cloud Storage bucket. This file contains the result of the executed SQL query that GitLab subjects to security checks.\n\n## Benefits of this approach\n\n* **Security:** Authentication is handled automatically via service accounts (or Workload Identity Federation for enhanced security in production environments).  \n* **Auditability:** All data exports are tracked through GitLab commits and pipeline logs.  \n* **Repeatability:** Consistent, predictable export process on every run, and can be scheduled.  \n* **Version control:** SQL queries are properly versioned and reviewed.  \n* **Automation:** Significantly fewer manual exports, reducing human error.\n\n## Try it today\n\nBy combining GitLab's DevSecOps capabilities with Google Cloud's BigQuery and Cloud Storage, you've now automated and secured your data publishing workflow. 
This approach reduces manual operations, resolves permission headaches, and addresses security concerns – all achieved with just a few lines of GitLab CI code.\n\n> Use this tutorial's [complete code example](https://gitlab.com/gitlab-partners-public/google-cloud/demos/big-query-data-publishing) to get started now.",[709,475,827,916,231,996],"google",{"slug":998,"featured":91,"template":788},"tutorial-secure-bigquery-data-publishing-with-gitlab",{"content":1000,"config":1008},{"title":1001,"description":1002,"authors":1003,"heroImage":795,"date":1004,"body":1005,"category":725,"tags":1006},"Automating container image migration from Amazon ECR to GitLab","When platform teams move their CI/CD to GitLab, migrating container images shouldn't be the bottleneck. Follow this step-by-step guide to automate the pipeline migration process.",[872],"2025-02-13","\"We need to migrate hundreds of container images from Amazon Elastic Container Registry (ECR) to GitLab. Can you help?\" This question kept coming up in conversations with platform engineers. They were modernizing their DevSecOps toolchain with GitLab but got stuck when faced with moving their container images. While each image transfer is simple, the sheer volume made it daunting.\n\nOne platform engineer perfectly said, \"I know exactly what needs to be done – pull, retag, push. But I have 200 microservices, each with multiple tags. I can't justify spending weeks on this migration when I have critical infrastructure work.\"\n\n## The challenge\n\nThat conversation sparked an idea. What if we could automate the entire process? When platform teams move their [CI/CD](https://about.gitlab.com/topics/ci-cd/) to GitLab, migrating container images shouldn't be the bottleneck. The manual process is straightforward but repetitive – pull each image, retag it, and push it to GitLab's Container Registry. 
Multiply this by dozens of repositories and multiple tags per image, and you're looking at days or weeks of tedious work.\n\n## The solution\n\nWe set out to create a GitLab pipeline that would automatically do all this heavy lifting. The goal was simple: Give platform engineers a tool they could set up in minutes and let run overnight, waking up to find all their images migrated successfully.\n\n### Setting up access\n\nFirst things first – security. We wanted to ensure teams could run this migration with minimal AWS permissions. Here's the read-only identity and access management (IAM) policy you'll need:\n\n```json\n{\n    \"Version\": \"2012-10-17\",\n    \"Statement\": [\n        {\n            \"Effect\": \"Allow\",\n            \"Action\": [\n                \"ecr:GetAuthorizationToken\",\n                \"ecr:BatchCheckLayerAvailability\",\n                \"ecr:GetDownloadUrlForLayer\",\n                \"ecr:DescribeRepositories\",\n                \"ecr:ListImages\",\n                \"ecr:DescribeImages\",\n                \"ecr:BatchGetImage\"\n            ],\n            \"Resource\": \"*\"\n        }\n    ]\n}\n```\n\n### GitLab configuration\n\nWith security handled, the next step is setting up GitLab. We kept this minimal - you'll need to configure these variables in your CI/CD settings:\n\n```\nAWS_ACCOUNT_ID: Your AWS account number\nAWS_DEFAULT_REGION: Your ECR region\nAWS_ACCESS_KEY_ID: [Masked]\nAWS_SECRET_ACCESS_KEY: [Masked]\nBULK_MIGRATE: true\n```\n\n### The migration pipeline\n\nNow for the interesting part. 
We built the pipeline using Docker-in-Docker to handle all the image operations reliably:\n\n```yaml\nimage: docker:20.10\nservices:\n  - docker:20.10-dind\n\nbefore_script:\n  - apk add --no-cache aws-cli jq\n  - aws sts get-caller-identity\n  - aws ecr get-login-password | docker login --username AWS --password-stdin\n  - docker login -u ${CI_REGISTRY_USER} -p ${CI_REGISTRY_PASSWORD} ${CI_REGISTRY}\n```\n\nThe pipeline works in three phases, each building on the last:\n\n1. Discovery\n\nFirst, it finds all your repositories:\n\n```bash\nREPOS=$(aws ecr describe-repositories --query 'repositories[*].repositoryName' --output text)\n```\n\n2. Tag enumeration\n\nThen, for each repository, it gets all the tags:\n\n```bash\nTAGS=$(aws ecr describe-images --repository-name $repo --query 'imageDetails[*].imageTags[]' --output text)\n```\n\n3. Transfer\n\nFinally, it handles the actual migration:\n\n```bash\ndocker pull ${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_DEFAULT_REGION}.amazonaws.com/${repo}:${tag}\ndocker tag ${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_DEFAULT_REGION}.amazonaws.com/${repo}:${tag} ${CI_REGISTRY_IMAGE}/${repo}:${tag}\ndocker push ${CI_REGISTRY_IMAGE}/${repo}:${tag}\n```\n\n## What you get\n\nRemember that platform engineer who didn't want to spend weeks on migration? Here's what this solution delivers:\n\n- automated discovery and migration of all repositories and tags\n- consistent image naming between ECR and GitLab\n- error handling for failed transfers\n- clear logging for tracking progress\n\nInstead of writing scripts and babysitting the migration, the platform engineer could focus on more valuable work.\n\n## Usage\n\nGetting started is straightforward:\n\n1. Copy the `.gitlab-ci.yml` to your repository.\n2. Configure the AWS and GitLab variables.\n3. 
Set `BULK_MIGRATE` to \"true\" to start the migration.\n\n## Best practices\n\nThrough helping teams with their migrations, we've learned a few things:\n\n- Run during off-peak hours to minimize the impact on your team.\n- Keep an eye on the pipeline logs - they'll tell you if anything needs attention.\n- Don't decommission ECR until you've verified all images transferred successfully.\n- For very large migrations, consider adding rate limiting to avoid overwhelming your network\n\nWe've open-sourced this pipeline in our public GitLab repository because we believe platform engineers should spend time building valuable infrastructure, not copying container images. Feel free to adapt it for your needs or ask questions about implementation.\n\n> #### Get started with this and other package components with our [CI/CD Catalog documentation](https://gitlab.com/explore/catalog/components/package).",[109,825,827,475,757,1007],"solutions architecture",{"slug":1009,"featured":91,"template":788},"automating-container-image-migration-from-amazon-ecr-to-gitlab",{"category":732,"slug":736,"posts":1011},[1012,1023,1035],{"content":1013,"config":1021},{"title":1014,"description":1015,"authors":1016,"heroImage":1017,"date":1018,"body":1019,"category":736,"tags":1020},"GitLab at Next '25: Transforming app modernization","GitLab participated in Google Cloud Next ‘25 and received a fifth consecutive Google Cloud Technology Partner of the Year recognition.",[992],"https://res.cloudinary.com/about-gitlab-com/image/upload/v1749663121/Blog/Hero%20Images/LogoLockupPlusLight.png","2025-04-11","GitLab's presence at Google Cloud Next '25 highlighted our strong partnership with Google Cloud and our joint commitment to accelerating software development and delivery. 
We were recognized again as a Technology Partner of the Year, and included in key enterprise initiatives like Google Distributed Cloud (GDC) Build Partners and [Startup Perks from Google Cloud](https://cloud.google.com/blog/topics/startups/why-global-startups-are-gathering-at-google-cloud-next25?e=13802955). Our team members demonstrated for attendees how GitLab is positioned to be a critical DevSecOps service for Google Cloud customers.\n\n## Continuing our award-winning partnership excellence\n\n\u003Cimg src=\"//images.ctfassets.net/r9o86ar0p03f/3U5CR3z47snaXMTQdbWteA/d8f97c2b599f6cae19aed99f8b02ad4a/GOOGLE_CLOUD_-_RED_CARPET_-_GITLAB_-_3.jpg\" alt=\"GitLab team at Google Cloud Next '25\" align=\"left\" width=\"400px\" style=\"padding-right: 20px; padding-bottom: 10px\"/>\n\nWe're thrilled to announce that GitLab has once again been named a [Google Cloud Technology Partner of the Year award winner](https://about.gitlab.com/press/releases/2025-04-08-gitlab-wins-a-google-cloud-technology-partner-of-the-year-award-for-devops/), marking our fifth consecutive time receiving this prestigious honor. This remarkable achievement reaffirms our position as Google Cloud's primary DevOps partner, consistently delivering exceptional value year after year. The continued recognition highlights how our collaboration with Google Cloud creates tangible business outcomes for customers, enabling organizations across industries to build, secure, and deploy applications with efficiency and confidence.\n\n## Google Distributed Cloud: DevSecOps for highly regulated environments\n\nAnother significant milestone announced at Next '25 was GitLab's \"Google Cloud Ready - Distributed Cloud\" certification. 
This designation enables organizations to implement GitLab in air-gapped environments, addressing critical security and compliance requirements.\n\nAs an end-to-end DevSecOps solution available on Google Distributed Cloud, GitLab enables sovereign development and operations for workloads critical to national security and regulatory compliance. This integration is particularly valuable for government agencies and financial institutions that require the highest levels of data sovereignty while maintaining modern development practices.\n\n## GitLab perks for Google Startups\n\nGitLab is a Featured Partner of the new Startup Perks program from Google Cloud. This partnership ties up with our own [GitLab for Startups](https://about.gitlab.com/solutions/startups/google-cloud/) and is meant to jumpstart new tech ventures with key DevSecOps capabilities that can help with fast growth and scaling.\n\nAs one of the [Featured Perks partners](https://cloud.google.com/startup/perks), eligible startups can get free or discounted access to one year of [GitLab Ultimate](https://about.gitlab.com/pricing/ultimate/) for 20 licenses. 
For seed or early stage startups, this benefit can help ensure collaboration, efficiency, and security without sacrificing speed and agility.\n\n## Thoughts from the dais\n\nGitLab experts shared valuable insights across multiple speaking sessions at Next '25, delivering practical knowledge on AI-powered DevSecOps, platform engineering, and cloud application delivery:\n\n* __[AI DevOps panel](https://cloud.withgoogle.com/next/25/session-library?session=BRK2-163&utm_source=copylink&utm_medium=unpaidsoc&utm_campaign=FY25-Q2-global-EXP106-physicalevent-er-next25-mc&utm_content=reg-is-live-next-homepage-social-share&utm_term=-):__ Mike Flouton, GitLab Vice President of Product Management, joined industry leaders to discuss how AI code assist tools boost productivity while enhancing application performance.\n\n* __[Software Logistics - The Missing Link in Modern Platform Engineering](https://cloud.withgoogle.com/next/25/session-library?session=CT2-16&utm_source=copylink&utm_medium=unpaidsoc&utm_campaign=FY25-Q2-global-EXP106-physicalevent-er-next25-mc&utm_content=reg-is-live-next-homepage-social-share&utm_term=-):__ GitLab Field CTO Lee Faus explored how effective software logistics create the foundation for successful platform engineering initiatives.\n\n* __[Revolutionizing Cloud Application Delivery with Intelligent Agents](https://cloud.withgoogle.com/next/25/session-library?session=CT2-17&utm_source=copylink&utm_medium=unpaidsoc&utm_campaign=FY25-Q2-global-EXP106-physicalevent-er-next25-mc&utm_content=reg-is-live-next-homepage-social-share&utm_term=-):__ Faus also demonstrated how intelligent agents are transforming cloud application delivery pipelines.\n\n## Engaging attendees across Next '25\n\nIn addition to our speaking sessions, GitLab maintained a strong presence throughout Next '25. 
At our booth #2170 on the expo floor, our team engaged with hundreds of attendees through demonstrations and lightning talks featuring both GitLab experts and partners like Arctiq and SADA.\n\nThe Google Cloud Makerspace's Dev Tools Pantry became a hub of innovation and collaboration. John Coghlan, Director of Developer Advocacy, observed: \"It was great to connect with many GitLab and Google Cloud customers in the Dev Tools Pantry in the Makerspace. We loved seeing the creative solutions that people came up with around developer experience and simplified deployments using GitLab and Google Cloud as their ingredients.\"\n\nThese hands-on experiences showcased how GitLab's DevSecOps solutions integrate well with Google Cloud services, with our AI-powered capabilities demonstrations drawing particular interest from attendees looking to enhance developer productivity and application security.\n\n## GitLab and Google Cloud: Transforming the future together\n\nThe energy witnessed at Next '25 exemplifies why GitLab and Google Cloud make such powerful partners. Together, we help organizations to transform how they build, secure, and deploy applications through:\n\n* AI-assisted development capabilities and collaborative workflows that can help accelerate innovation in Google Cloud environments\n\n* Shift-left security approach that integrates with Google Cloud's security-first architecture to identify vulnerabilities early in the development lifecycle\n\n* Flexible deployment options and comprehensive observability that work harmoniously with Google Cloud infrastructure to help streamline operations\n\nAs demonstrated at Next '25, the GitLab and Google Cloud partnership delivers tangible advantages for development teams facing real-world challenges – whether accelerating AI adoption, strengthening security in regulated environments, or streamlining complex deployment pipelines. 
The technical integration points and customer success stories shared throughout the event underscore that this collaboration continues to produce practical solutions that matter.\n\n> #### Discover how GitLab and Google Cloud can transform your application development experience at [GitLab's Google Cloud partnership page](https://about.gitlab.com/partners/technology-partners/google-cloud-platform/).",[996,475,277,282,736],{"slug":1022,"featured":6,"template":788},"gitlab-at-next-25-transforming-app-modernization",{"content":1024,"config":1033},{"title":1025,"description":1026,"authors":1027,"heroImage":1029,"date":1030,"body":1031,"category":736,"tags":1032},"Introducing The Source: Insights for the future of software development","Explore our new publication for transformative software development strategies and expert advice on emerging technologies.",[1028],"Chandler Gibbons","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674616/Blog/Hero%20Images/blog-image-template-1800x945__1_.png","2024-10-29","Modern software development is transforming the way organizations create, deliver, and scale business value. Teams must be able to build solutions quickly and efficiently while navigating rising security threats, emerging technologies, and increasingly complex compliance demands.\n\nToday, GitLab is launching [The Source](https://about.gitlab.com/the-source/), a new publication that covers the evolution of software development as an engine for business success. We offer regular insights into the future of software development, supported by original research and analysis from our subject matter experts and thought leaders.\n\nOn The Source, you will find answers to questions such as:  \n* How can leaders measure the ROI of AI across the software development lifecycle?  
\n* What’s the best way to ensure security and compliance across the entire software supply chain?\n* What types of efficiencies will teams see from platform and toolchain consolidation?\n\nHere’s a sample of what's on The Source today:\n\n**4 steps for measuring the impact of AI**\n\n\"Evaluating the productivity of AI-enhanced coding requires a more nuanced approach than traditional metrics such as lines of code, code commits, or task completion. It necessitates shifting the focus to real-world business outcomes that balance development speed, software quality, and security.\"  \n- [Learn the 4 steps from AI expert Taylor McCaslin.](https://about.gitlab.com/the-source/ai/4-steps-for-measuring-the-impact-of-ai/)\n\n**Addressing the root cause of common security frustrations**\n\n\"DevSecOps promises better integration between engineering and security, but it’s clear that frustrations and misalignment persist. That’s because these challenges are symptoms of a larger problem with how organizations view security, as well as how teams work together and how they allocate time to security.\"  \n- [Solve this disconnect with expert advice from GitLab CISO Josh Lemos.](https://about.gitlab.com/the-source/security/security-its-more-than-culture-addressing-the-root-cause-of-common-security/)\n\n**Driving business results with platform engineering**\n\n\"Platform engineering aims to normalize and standardize developer workflows by providing developers with optimized 'golden paths' for most of their workloads and flexibility to define exceptions for the rest.\"  \n- [Discover GitLab Field CTO Brian Wald's best practices for platform engineering success.](https://about.gitlab.com/the-source/platform-and-infrastructure/driving-business-results-with-platform-engineering/)\n\n## Make The Source your decision-making partner\n\nVisit [The Source](https://about.gitlab.com/the-source/) today to explore the latest insights, get answers to your leadership questions, and learn something 
new to share with your teams. You can also subscribe to our newsletter for regular updates directly to your inbox. Join our community of forward-thinking technology leaders and help shape the future of software development.",[675,767,736,709],{"slug":1034,"featured":91,"template":788},"introducing-the-source-insights-for-the-future-of-software-development",{"content":1036,"config":1047},{"title":1037,"description":1038,"authors":1039,"heroImage":1041,"date":1042,"body":1043,"category":736,"tags":1044},"GitLab named a Leader in the 2024 Gartner Magic Quadrant for DevOps Platforms","GitLab is positioned highest in Ability to Execute and Completeness of Vision, which we believe is recognition of our customers’ success and our continued innovation in the DevOps category.",[1040],"Ashley Kramer","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662523/Blog/Hero%20Images/Gartner_DevOps_Blog_Post_Cover_Image_1800x945__2_.png","2024-09-05","DevOps was originally just a concept, a methodology for delivering software faster by bringing traditionally disparate teams together. It was a response to all the issues caused by the separation of those who built software and those who deployed it. \n\nAt GitLab, we iterated on that concept: Instead of stitching together tools to create a complex DevOps toolchain, a [single DevOps platform](https://about.gitlab.com/platform/) would result in tighter collaboration, greater automation, and more scalable and standardized processes. \n\nWe believe that strategy, which focuses on our customers' success, was correct. 
In the second iteration of the [Gartner Magic Quadrant for DevOps Platforms](https://about.gitlab.com/gartner-magic-quadrant/), we are once again named a Leader by Gartner and this time, positioned highest on both axes: Ability to Execute and Completeness of Vision.\n\n![Gartner MQ for DevOps Platforms 2024 image](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674334/Blog/Content%20Images/figure1.png)\n\n> Download the [2024 Gartner® Magic Quadrant™ for DevOps Platforms report](https://about.gitlab.com/de-de/gartner-magic-quadrant/).\n\nToday’s software organizations must contend with increasing security threats, complex compliance requirements, and carefully adopting new technologies such as generative AI. This is in addition to simply delivering on their promises of scalable services and continued innovation to their own customers.\n\nGitLab helps our customers face these challenges and become leaders in their own industries. With our AI-powered DevSecOps platform, they are shifting security left, enabling visibility throughout the development lifecycle, and bringing together all the roles and responsibilities needed to deliver the software that powers our world.\n\n## Furthering the DevOps vision\n\nOur work here isn’t done. We will continue to innovate on the DevOps vision and advance our DevSecOps platform in two ways.\n\nFirst, we want to invite even more teams to collaborate on the same platform, with specific features for those involved in [Agile planning](https://about.gitlab.com/blog/categories/agile-planning/), [data science](https://about.gitlab.com/topics/devops/the-role-of-ai-in-devops/), and [observability and application monitoring](https://about.gitlab.com/solutions/gitlab-observability/).\n\nSecond, we strive to make our platform adoption and deployment options even more flexible to meet our customers’ diverse needs. 
This includes investing in [GitLab Dedicated](https://about.gitlab.com/dedicated/), our single-tenant, hosted option, so companies in highly regulated industries can have the simplicity of SaaS and the power of all the latest features and capabilities, while adhering to the compliance needs of isolated infrastructure.\n\n## Helping organizations build secure software \n\nBeyond building a better collaboration platform for delivering software, one of the most important things we do at GitLab is help organizations build more secure and compliant software. Our vision here sets us apart, as GitLab integrates [security scanning](https://about.gitlab.com/solutions/security-compliance/) at the point of code commit, not when applications are ready for release. This helps teams catch vulnerabilities sooner, leading to faster release cycles. GitLab also makes compliance easy with policy guardrails and automatically generating [a software bill of materials](https://about.gitlab.com/blog/the-ultimate-guide-to-sboms/).\n\nWe know our customers face more security threats as their own software surface attack area increases. This is why, in the next 12 months, we plan to continue improving our SAST scanners, add additional policy controls, and build [an upcoming native secrets manager](https://about.gitlab.com/blog/gitlab-native-secrets-manager-to-give-software-supply-chain-security-a-boost/).\n\n## Leading with AI throughout the SDLC\n\nOur vision is to also be a leader in AI – both in enabling our customers to build innovative software with AI, and also to do it with privacy-first AI technology. AI represents a generational leap forward with an incredible amount of opportunity when integrated throughout the software development lifecycle. As we innovate, we are doing so responsibly. 
We’ve heard our customers’ concerns loud and clear: They want [AI with guardrails](https://about.gitlab.com/the-source/ai/velocity-with-guardrails-ai-automation/), [AI that’s transparent](https://about.gitlab.com/ai-transparency-center/), and AI that respects their code and intellectual property.\n\nWe are committed to building [GitLab Duo](https://about.gitlab.com/gitlab-duo/), a suite of AI-powered features for our DevSecOps platform that are all of these: comprehensive, privacy-first, and built to support the entire software development lifecycle.\n\nWe believe this commitment and our GitLab Duo features are why, recently, [Gartner® also named us a Leader in its first Magic Quadrant™ for AI Code Assistants](https://about.gitlab.com/blog/gitlab-named-a-leader-in-2024-gartner-magic-quadrant-for-ai-code-assistants/).\n\nWe are honored by this recognition and see it as a sign to continue listening to you  –  our customers – because that is what drives our vision, product roadmap, and commitment in delivering the best DevSecOps platform.\n\n> Download the [2024 Gartner® Magic Quadrant™ for DevOps Platforms report](https://about.gitlab.com/gartner-magic-quadrant/).\n\n***Source: Gartner, Magic Quadrant for DevOps Platforms, Keith Mann, Thomas Murphy, Bill Holz, George Spafford, August 2024***\n\n***GARTNER is a registered trademark and service mark of Gartner, Inc. and/or its affiliates in the U.S. and internationally, and MAGIC QUADRANT is a\nregistered trademark of Gartner, Inc. and/or its affiliates and are used herein with permission. All rights reserved.***\n\n***Gartner does not endorse any vendor, product or service depicted in its research publications, and does not advise technology users to select only those vendors with the highest ratings or other designation. Gartner research publications consist of the opinions of Gartner’s research organization and should not be construed as statements of fact. 
Gartner disclaims all warranties, expressed or implied, with respect to this research, including any warranties of merchantability or fitness for a particular purpose.***\n\n***This graphic was published by Gartner Inc. as part of a larger report and should be evaluated in the context of the entire document. The Gartner document is available upon request from Gartner.***",[736,1045,475,1046,709],"research","DevOps",{"slug":1048,"featured":91,"template":788},"gitlab-named-a-leader-in-the-2024-gartner-magic-quadrant-for-devops",{"category":743,"slug":747,"posts":1050},[1051,1063,1075],{"content":1052,"config":1061},{"title":1053,"description":1054,"authors":1055,"heroImage":1057,"body":1058,"date":1059,"category":747,"tags":1060},"What’s new in Git 2.50.0?","Here are contributions from GitLab's Git team and the Git community such as the git-diff-pairs(1) command and git-rev-list(1) option to perform batched reference updates.",[1056],"Justin Tobler","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749663087/Blog/Hero%20Images/git3-cover.png","The Git project recently released [Git Version 2.50.0](https://lore.kernel.org/git/xmqq1prj1umb.fsf@gitster.g/T/#u). Let's look at a few notable highlights from this release, which includes contributions from the Git team at GitLab and also the wider Git community.\n## New git-diff-pairs(1) command\n\nDiffs are at the heart of every code review and show all the changes made\nbetween two revisions. GitLab shows diffs in various places, but the most\ncommon place is a merge request's [\"Changes\" tab](https://docs.gitlab.com/user/project/merge_requests/changes/).\nBehind the scenes, diff generation is powered by\n[`git-diff(1)`](https://git-scm.com/docs/git-diff). For example:\n\n```shell\n$ git diff HEAD~1 HEAD\n```\n\nThis command returns the full diff for all changed files. 
This might pose a scalability challenge because the number of files changed between a set of revisions could be very large and cause the command to reach self-imposed timeouts for the GitLab backend. For large change sets, it would be better if\nthere were a way to break diff computation into smaller, more digestible chunks.\n\nOne way this can be achieved is by using\n[`git-diff-tree(1)`](https://git-scm.com/docs/git-diff-tree) to retrieve info\nabout all the changed files:\n\n```shell\n$ git diff-tree -r -M --abbrev HEAD~ HEAD\n:100644 100644 c9adfed339 99acf81487 M      Documentation/RelNotes/2.50.0.adoc\n:100755 100755 1047b8d11d 208e91a17f M      GIT-VERSION-GEN\n```\n\nGit refers to this output as the [\"raw\" format](https://git-scm.com/docs/git-diff-tree#_raw_output_format).\nIn short, each line of output lists filepairs and the accompanying metadata\nabout what has changed between the start and end revisions. Compared to\ngenerating the \"patch\" output for large changes, this process is relatively\nquick and provides a summary of everything that changed. This command can optionally perform rename detection by  appending the `-M` flag to check if identified changes were due to a file rename.\n\nWith this information, we could use `git-diff(1)` to compute each of the\nfilepair diffs individually. For example, we can provide the blob IDs\ndirectly:\n\n```shell\n$ git diff 1047b8d11de767d290170979a9a20de1f5692e26 208e91a17f04558ca66bc19d73457ca64d5385f\n```\n\nWe can repeat this process for each of the filepairs, but spinning up a\nseparate Git process for each individual file diff is not very efficient.\nFurthermore, when using blob IDs, the diff loses some contextual information\nsuch as the change status, and file modes which are stored in with the parent\ntree object. 
What we really want is a mechanism to feed \"raw\" filepair info and\ngenerate the corresponding patch output.\n\nWith the 2.50 release, Git has a new built-in command named\n[`git-diff-pairs(1)`](https://git-scm.com/docs/git-diff-pairs). This command\naccepts \"raw\" formatted filepair info as input on stdin to determine exactly which patches to output. The following example showcases how this command could be\nused:\n\n```shell\n$ git diff-tree -r -z -M HEAD~ HEAD | git diff-pairs -z\n```\n\nWhen used in this manner, the resulting output is identical to using `git-diff(1)`.\nBy having a separate command to generate patch output, the \"raw\" output from\n`git-diff-tree(1)` can be broken up into smaller batches of filepairs and fed to separate\n`git-diff-pairs(1)` processes. This solves the previously mentioned scalability\nconcern because diffs no longer have to be computed all at once. Future GitLab\nreleases could build upon this mechanism to improve diff\ngeneration performance, especially in cases where large change sets are\nconcerned. For more information on this change, check out the corresponding\n[mailing-list thread](https://lore.kernel.org/git/20250228213346.1335224-1-jltobler@gmail.com/).\n\n_This project was led by [Justin Tobler](https://gitlab.com/justintobler)._\n\n## Batched reference updates\n\nGit provides the [`git-update-ref(1)`](https://git-scm.com/docs/git-update-ref)\ncommand to perform reference updates. When used with the `--stdin` flag,\nmultiple reference updates can be batched together in a single transaction by\nspecifying instructions for each reference update to be performed on stdin.\nBulk updating references in this manner also provides atomic behavior whereby a\nsingle reference update failure results in an aborted transaction and no\nreferences being updated. 
Here is an example showcasing this behavior:\n\n```shell\n# Create repository with three empty commits and branch named \"foo\"\n$ git init\n$ git commit --allow-empty -m 1\n$ git commit --allow-empty -m 2\n$ git commit --allow-empty -m 3\n$ git branch foo\n\n# Print out the commit IDs\n$ git rev-list HEAD\ncf469bdf5436ea1ded57670b5f5a0797f72f1afc\n5a74cd330f04b96ce0666af89682d4d7580c354c\n5a6b339a8ebffde8c0590553045403dbda831518\n\n# Attempt to create a new reference and update existing reference in transaction.\n# Update is expected to fail because the specified old object ID doesn’t match.\n$ git update-ref --stdin \u003C\u003CEOF\n> create refs/heads/bar cf469bdf5436ea1ded57670b5f5a0797f72f1afc\n> update refs/heads/foo 5a6b339a8ebffde8c0590553045403dbda831518 5a74cd330f04b96ce0666af89682d4d7580c354c\n> EOF\nfatal: cannot lock ref 'refs/heads/foo': is at cf469bdf5436ea1ded57670b5f5a0797f72f1afc but expected 5a74cd330f04b96ce0666af89682d4d7580c354c\n\n# The \"bar\" reference was not created.\n$ git switch bar\nfatal: invalid reference: bar\n```\n\nCompared to updating many references individually, updating in bulk is also\nmuch more efficient. While this works well, there might be certain\ncircumstances where it is okay for a subset of the requested reference updates\nto fail, but we still want to take advantage of the efficiency gains of bulk\nupdates.\n\nWith this release, `git-update-ref(1)` has the new `--batch-updates` option,\nwhich allows the updates to proceed even when one or more reference updates\nfails. In this mode, individual failures are reported in the following format:\n\n```text\nrejected SP (\u003Cold-oid> | \u003Cold-target>) SP (\u003Cnew-oid> | \u003Cnew-target>) SP \u003Crejection-reason> LF\n```\n\nThis allows successful reference updates to proceed while providing context to\nwhich updates were rejected and for what reason. 
Using the same example\nrepository from the previous example:\n\n```shell\n# Attempt to create a new reference and update existing reference in transaction.\n$ git update-ref --stdin --batch-updates \u003C\u003CEOF\n> create refs/heads/bar cf469bdf5436ea1ded57670b5f5a0797f72f1afc\n> update refs/heads/foo 5a6b339a8ebffde8c0590553045403dbda831518 5a74cd330f04b96ce0666af89682d4d7580c354c\n> EOF\nrejected refs/heads/foo 5a6b339a8ebffde8c0590553045403dbda831518 5a74cd330f04b96ce0666af89682d4d7580c354c incorrect old value provided\n\n# The \"bar\" reference was created even though the update to \"foo\" was rejected.\n$ git switch bar\nSwitched to branch 'bar'\n```\n\nThis time, with the `--batch-updates` option, the reference creation succeeded\neven though the update didn't work. This patch series lays the groundwork for\nfuture performance improvements in `git-fetch(1)` and `git-receive-pack(1)`\nwhen references are updated in bulk. For more information, check the\n[mailing-list thread](https://lore.kernel.org/git/20250408085120.614893-1-karthik.188@gmail.com/)\n\n_This project was led by [Karthik Nayak](https://gitlab.com/knayakgl)._\n\n## New filter option for git-cat-file(1)\n\nWith [`git-cat-file(1)`](https://git-scm.com/docs/git-cat-file), it is possible\nto print info for all objects contained in the repository via the\n`--batch–all-objects` option. 
For example:\n\n```shell\n# Setup simple repository.\n$ git init\n$ echo foo >foo\n$ git add foo\n$ git commit -m init\n\n# Create an unreachable object.\n$ git commit --amend --no-edit\n\n# Use git-cat-file(1) to print info about all objects including unreachable objects.\n$ git cat-file --batch-all-objects --batch-check='%(objecttype) %(objectname)'\ncommit 0b07e71d14897f218f23d9a6e39605b466454ece\ntree 205f6b799e7d5c2524468ca006a0131aa57ecce7\nblob 257cc5642cb1a054f08cc83f2d943e56fd3ebe99\ncommit c999f781fd7214b3caab82f560ffd079ddad0115\n```\n\nIn some situations, a user might want to search through all objects in the\nrepository, but only output a subset based on some specified attribute. For\nexample, if we wanted to see only the objects that are commits, we could use\n`grep(1)`:\n\n```shell\n$ git cat-file --batch-all-objects --batch-check='%(objecttype) %(objectname)' | grep ^commit\ncommit 0b07e71d14897f218f23d9a6e39605b466454ece\ncommit c999f781fd7214b3caab82f560ffd079ddad0115\n```\n\nWhile this works, one downside with filtering the output is that\n`git-cat-file(1)` still has to traverse all the objects in the repository, even\nthe ones that the user is not interested in. This can be rather inefficient.\n\nWith this release, `git-cat-file(1)` now has the `--filter` option, which only\nshows objects matching the specified criteria. This is similar to the option of\nthe same name for `git-rev-list(1)`, but with only a subset of the filters\nsupported. The supported filters are `blob:none`, `blob:limit=`, as well as\n`object:type=`. 
Similar to the previous example, objects can be filtered by\ntype with Git directly:\n\n```shell\n$ git cat-file --batch-all-objects --batch-check='%(objecttype) %(objectname)' --filter='object:type=commit'\ncommit 0b07e71d14897f218f23d9a6e39605b466454ece\ncommit c999f781fd7214b3caab82f560ffd079ddad0115\n```\n\nNot only is it convenient for Git to handle the processing, for large\nrepositories with many objects, it is also potentially more efficient. If a\nrepository has bitmap indices, it becomes possible for Git to efficiently\nlookup objects of a specific type, and thus avoid scanning through the\npackfile, which leads to a significant speedup. Benchmarks conducted on the\n[Chromium repository](https://github.com/chromium/chromium.git) show\nsignificant improvements:\n\n```text\nBenchmark 1: git cat-file --batch-check --batch-all-objects --unordered --buffer --no-filter\n   Time (mean ± σ):     82.806 s ±  6.363 s    [User: 30.956 s, System: 8.264 s]\n   Range (min … max):   73.936 s … 89.690 s    10 runs\n\nBenchmark 2: git cat-file --batch-check --batch-all-objects --unordered --buffer --filter=object:type=tag\n   Time (mean ± σ):      20.8 ms ±   1.3 ms    [User: 6.1 ms, System: 14.5 ms]\n   Range (min … max):    18.2 ms …  23.6 ms    127 runs\n\nBenchmark 3: git cat-file --batch-check --batch-all-objects --unordered --buffer --filter=object:type=commit\n   Time (mean ± σ):      1.551 s ±  0.008 s    [User: 1.401 s, System: 0.147 s]\n   Range (min … max):    1.541 s …  1.566 s    10 runs\n\nBenchmark 4: git cat-file --batch-check --batch-all-objects --unordered --buffer --filter=object:type=tree\n   Time (mean ± σ):     11.169 s ±  0.046 s    [User: 10.076 s, System: 1.063 s]\n   Range (min … max):   11.114 s … 11.245 s    10 runs\n\nBenchmark 5: git cat-file --batch-check --batch-all-objects --unordered --buffer --filter=object:type=blob\n   Time (mean ± σ):     67.342 s ±  3.368 s    [User: 20.318 s, System: 7.787 s]\n   Range (min … max):   62.836 s … 
73.618 s    10 runs\n\nBenchmark 6: git cat-file --batch-check --batch-all-objects --unordered --buffer --filter=blob:none\n   Time (mean ± σ):     13.032 s ±  0.072 s    [User: 11.638 s, System: 1.368 s]\n   Range (min … max):   12.960 s … 13.199 s    10 runs\n\nSummary\n   git cat-file --batch-check --batch-all-objects --unordered --buffer --filter=object:type=tag\n    74.75 ± 4.61 times faster than git cat-file --batch-check --batch-all-objects --unordered --buffer --filter=object:type=commit\n   538.17 ± 33.17 times faster than git cat-file --batch-check --batch-all-objects --unordered --buffer --filter=object:type=tree\n   627.98 ± 38.77 times faster than git cat-file --batch-check --batch-all-objects --unordered --buffer --filter=blob:none\n  3244.93 ± 257.23 times faster than git cat-file --batch-check --batch-all-objects --unordered --buffer --filter=object:type=blob\n  3990.07 ± 392.72 times faster than git cat-file --batch-check --batch-all-objects --unordered --buffer --no-filter\n```\n\nInterestingly, these results indicate that the computation time now scales with\nthe number of objects for a given type instead of the number of total objects\nin the packfile. The original mailing-list thread can be found\n[here](https://lore.kernel.org/git/20250221-pks-cat-file-object-type-filter-v1-0-0852530888e2@pks.im/).\n\n_This project was led by [Patrick Steinhardt](https://gitlab.com/pks-gitlab)._\n\n## Improved performance when generating bundles\n\nGit provides a means to generate an archive of a repository which contains a\nspecified set of references and accompanying reachable objects via the\n[`git-bundle(1)`](https://git-scm.com/docs/git-bundle) command. This operation\nis used by GitLab to generate repository backups and also as part of the\n[bundle-URI](https://git-scm.com/docs/bundle-uri) mechanism.\n\nFor large repositories containing millions of references, this operation can\ntake hours or even days. 
For example, with the main GitLab repository\n([gitlab-org/gitlab](https://gitlab.com/gitlab-org/gitlab)), backup times were\naround 48 hours. Investigation revealed there was a performance bottleneck due\nto how Git was performing a check to avoid duplicated references being included\nin the bundle. The implementation used a nested `for` loop to iterate and\ncompare all listed references, leading to O(N^2) time complexity. This scales\nvery poorly as the number of references in a repository increases.\n\nIn this release, this issue was addressed by replacing the nested loops with a\nmap data structure leading to a significant speedup. The following benchmark\nthe performance improvement for creating a bundle with a repository containing\n100,000 references:\n\n```text\nBenchmark 1: bundle (refcount = 100000, revision = master)\n  Time (mean ± σ):     14.653 s ±  0.203 s    [User: 13.940 s, System: 0.762 s]\n  Range (min … max):   14.237 s … 14.920 s    10 runs\n\nBenchmark 2: bundle (refcount = 100000, revision = HEAD)\n  Time (mean ± σ):      2.394 s ±  0.023 s    [User: 1.684 s, System: 0.798 s]\n  Range (min … max):    2.364 s …  2.425 s    10 runs\n\nSummary\n  bundle (refcount = 100000, revision = HEAD) ran\n    6.12 ± 0.10 times faster than bundle (refcount = 100000, revision = master)\n```\n\nTo learn more, check out our blog post\n[How we decreased GitLab repo backup times from 48 hours to 41 minutes](https://about.gitlab.com/blog/how-we-decreased-gitlab-repo-backup-times-from-48-hours-to-41-minutes/).\nYou can also find the original mailing list thread\n[here](https://lore.kernel.org/git/20250401-488-generating-bundles-with-many-references-has-non-linear-performance-v1-0-6d23b2d96557@gmail.com/).\n\n_This project was led by [Karthik Nayak](https://gitlab.com/knayakgl)._\n\n## Better bundle URI unbundling\n\nThrough the [bundle URI](https://git-scm.com/docs/bundle-uri) mechanism in Git,\nlocations to fetch bundles from can be provided to clients with the 
goal to\nhelp speed up clones and fetches. When a client downloads a bundle, references\nunder `refs/heads/*` are copied from the bundle into the repository along with\ntheir accompanying objects. A bundle might contain additional references\noutside of `refs/heads/*` such as `refs/tags/*`, which are simply ignored when\nusing bundle URI on clone.\n\nIn Git 2.50, this restriction is lifted, and all references\nmatching `refs/*` contained in the downloaded bundle are copied.\n[Scott Chacon](https://github.com/schacon), who contributed this functionality,\ndemonstrates the difference when cloning\n[gitlab-org/gitlab-foss](https://gitlab.com/gitlab-org/gitlab-foss):\n\n```shell\n$ git-v2.49 clone --bundle-uri=gitlab-base.bundle https://gitlab.com/gitlab-org/gitlab-foss.git gl-2.49\nCloning into 'gl2.49'...\nremote: Enumerating objects: 1092703, done.\nremote: Counting objects: 100% (973405/973405), done.\nremote: Compressing objects: 100% (385827/385827), done.\nremote: Total 959773 (delta 710976), reused 766809 (delta 554276), pack-reused 0 (from 0)\nReceiving objects: 100% (959773/959773), 366.94 MiB | 20.87 MiB/s, done.\nResolving deltas: 100% (710976/710976), completed with 9081 local objects.\nChecking objects: 100% (4194304/4194304), done.\nChecking connectivity: 959668, done.\nUpdating files: 100% (59972/59972), done.\n\n$ git-v2.50 clone --bundle-uri=gitlab-base.bundle https://gitlab.com/gitlab-org/gitlab-foss.git gl-2.50\nCloning into 'gl-2.50'...\nremote: Enumerating objects: 65538, done.\nremote: Counting objects: 100% (56054/56054), done.\nremote: Compressing objects: 100% (28950/28950), done.\nremote: Total 43877 (delta 27401), reused 25170 (delta 13546), pack-reused 0 (from 0)\nReceiving objects: 100% (43877/43877), 40.42 MiB | 22.27 MiB/s, done.\nResolving deltas: 100% (27401/27401), completed with 8564 local objects.\nUpdating files: 100% (59972/59972), done.\n```\n\nComparing these results, we see that Git 2.50 fetches 43,887 objects\n(40.42 MiB) 
after the bundle was extracted whereas Git 2.49 fetches a\ntotal of 959,773 objects (366.94 MiB). Git 2.50 fetches roughly 95% fewer\nobjects and 90% less data, which benefits both the client and the server. The\nserver needs to process a lot less data to the client and the client needs to\ndownload and extract less data. In the example provided by Scott this led to a\nspeedup of 25%.\n\nTo learn more, check out the corresponding\n[mailing-list thread](https://lore.kernel.org/git/pull.1897.git.git.1740489585344.gitgitgadget@gmail.com/).\n\n_This patch series was contributed by [Scott Chacon](https://github.com/schacon)._\n\n## Read more\n\nThis article highlighted just a few of the contributions made by GitLab and\nthe wider Git community for this latest release. You can learn about these from\nthe [official release announcement](https://lore.kernel.org/git/xmqq1prj1umb.fsf@gitster.g/) of the Git project. Also, check\nout our [previous Git release blog posts](https://about.gitlab.com/blog/tags/git/)\nto see other past highlights of contributions from GitLab team members.\n","2025-06-16",[984,903,267],{"featured":91,"template":788,"slug":1062},"what-s-new-in-git-2-50-0",{"content":1064,"config":1073},{"title":1065,"description":1066,"authors":1067,"heroImage":1069,"date":1070,"body":1071,"category":747,"tags":1072},"Celebrating Git's 20th anniversary with creator Linus Torvalds","Discover the origins of the open-source version control system, why he handed over the reins a few months in, and what he thinks about adding new programming languages to Git.",[1068],"Patrick Steinhardt","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662510/Blog/Hero%20Images/git-20-years-opt1.png","2025-04-07","The Git version control system was first released on April 7, 2005, by the father of the Linux kernel, Linus Torvalds. 
To mark the 20th anniversary of this important project that is nowadays used by almost every single developer, I interviewed Linus about the history of Git, why he handed over maintainership of Git, and what he considers to be its most important milestones.\n\n**In 2005, you were already the maintainer of the thriving Linux kernel. Why did you decide to start a new version control system?**\n\nSo, I got into it from really despising version control.\n\nI had used the traditional version control systems (CVS/RCS/SCCS) both as an end user (i.e., tracking open source projects like [GCC](https://gcc.gnu.org/)) and as a developer (we used CVS at Transmeta for everything) and absolutely hated the experience with a passion.\n\n\u003Cimg src=\"https://about.gitlab.com/images/blogimages/linustorvalds.png\" align=\"left\" width=\"200px\" style=\"padding-right: 20px; padding-bottom: 10px\"/>\n\nAnd yes, back then most projects that used CVS had probably moved to [SVN](https://subversion.apache.org/), but honestly, I always felt that SVN was just \"lipstick on a pig.\" It was just CVS in another form, with some UI improvements, but none of the fundamentals fixed, and a few new problems added.\n\nThe problems with CVS and its ilk are too many to even list, and, happily, they have largely become irrelevant and younger developers have probably never even had to deal with any of it. 
I absolutely refused to deal with it for the kernel, even though a few subsystems (notably the networking side) were actually using CVS to track their code back in the '90s.\n\nAnyway, back then I lived in the Bay Area, and Larry McVoy, who I knew from other projects (mainly [lmbench](https://www.usenix.org/legacy/publications/library/proceedings/sd96/full_papers/mcvoy.pdf)), had started BitMover, which had a new version control model called BitKeeper, or BK, for short.\n\nBK wasn't open source, but Larry liked open source projects and really felt that the lack of version control was holding the kernel back. He wasn't wrong, but the traditional source code managers (SCMs) really didn't work for me at all. Larry spent some time showing me and David Miller (networking maintainer and existing CVS user) what BitKeeper could do.\n\nBK wasn't perfect, and it was based on Source Code Control System (SCCS) like so many other traditional SCMs were, and thus had the same broken \"history per file\" model that everybody else had, and that causes huge and fundamental issues with file renaming and deletion.\n\nBut BK also wasn't just that \"lipstick\" thing. It may have used SCCS at a low level, but on a higher level it fixed some really fundamental things, and did proper distributed development, and had a real global – not per-file – history that made merging code from different trees actually work.\n\nWith CVS, creating branches and merging them was something you had to plan and discuss with people, and were major events. With BK, every repository was a branch. We take that for granted now, and Git obviously took it much further by having many branches *per* repository, but even the much more limited BK model was really a big deal at the time.\n\nAgain, BK wasn't perfect. 
As mentioned, it did do per-file history, which really is a big fundamental problem that makes renaming and file merging simply not work reliably, and inevitably causes chaos and pain (for CVS people, think Attic, shudder). And it had some scalability issues, too, but those took a while to become more than a bit problematic.\n\nBut the biggest problem with BK was the licensing, and while over the years (we used BK from 2002 to 2005) a lot of kernel maintainers did end up switching over to it, it was always a bit of a friction point. And that friction came to a head in late 2004, and the use of BK for the kernel basically became untenable a few months later.\n\nI was in the situation that for three years I'd finally used source control that worked, and it really had solved a lot of problems. There was no way I was going back to the days before source control, but in the years we'd been using BK, nothing better had really come out of the open source community.\n\nSure, people knew that CVS and SVN didn't work well, and there were projects that tried alternate approaches, but some of those approaches were even worse (basically amounting to \"fancy patch tracking\"), or had some good ideas but in the process making up some entirely new horrible design mistakes ([Monotone](https://www.monotone.ca/)).\n\nSo, I looked around for a while, and decided that I didn't have any options – I had to write my own.\n\nNow, technically, it actually did take only a few days to make the first version of Git, and hey, it's all there in the Git commit history. It's easy enough to see how it goes from pretty much zero to being usable enough that I started applying patches from others a week later (and being actively used for the kernel a few days after that).\n\nBut that ignores the fact that I had been *thinking* about the problem for a while by then. Writing code is easy. Getting a good design is what matters. 
So there was a fair amount of background to those few days that is pretty important, and that part doesn't show up in the history.\n\nAnd hey, that first version was very, very rough, and didn't do a lot that was to come later. But you can definitely already see much of the core design in those first few days.\n\n**Can you give us a short recount of the first days and weeks of how the Git project was started?**\n\nI had basically decided that I will stop kernel development until I had an alternative that worked for me. The main goals were to be distributed and high performance, and be something you could absolutely rely on to catch any corruption.\n\nBut I really do want to stress that I wasn't interested in SCMs, per se. I was interested in the end result, not in the process. So Git was never like the kernel for me: I do Linux because I think kernels are interesting - I did Git because I had to.\n\nWhich then directly segues into your next question.\n\n**You handed over the maintainership of Git to Junio Hamano after a couple of months, and Junio is still the maintainer. Why did you hand over maintainership and what made you pick Junio?**\n\nHanding over maintainership was not a hard choice. It was very much: \"The moment somebody else comes along that I can trust to keep it going, I'll go back to doing just the kernel.\"\n\nWhich is not to say that I just threw things over the wall and prayed for the best. I ended up maintaining Git for something like four months because I felt I needed to find somebody who would stick around, and had that hard-to-explain quality of \"GoodTaste\"(TM).\n\nJunio had been one of the very early people involved (he literally showed up the first week of development), but it's not like I just said, \"Tag, you're it.\"  It takes a while to see who sticks around, and who writes code and makes decisions that make sense.\n\nAnd I think Junio has been exemplary. 
I get much too much credit for the few months I spent on Git - particularly in light of the 20th anniversary. I'll take credit for getting the core design right, and getting the project started, but it really is Junio who has led the project (not to belittle the hundreds of other people involved, but still).\n\n**The initial version of the Mercurial version control system was released only 12 days after the initial version of Git, on April 19, 2005. Many people claim that Mercurial's user experience was superior over Git's, but nowadays Git is significantly more popular. Why do you think that Git has won over Mercurial?**\n\nOh, a big part of it is obviously just network effects, and SCMs have very strong network effects. It's why CVS survived as long as it did despite its limitations.\n\nSo, the fact that the kernel used Git (and then at some point it got to be very popular in the Ruby on Rails community, and then it took off everywhere).\n\nBut I really do think that the design of Git is superior. The core model is both very simple and very powerful, and I think that made it easier to translate into other environments. JGit was an early example of that, but you obviously have implementations like the MSgit virtual filesystem, etc.\n\nAnd while Git was famously somewhat hard to use early on, I really do think that some of that comes from having done things \"right,\" where people coming from other environments found Git non-intuitive because Git really did a few hard decisions that a traditional SCM person would never have done.\n\n**The Git project has not stood still since you handed maintainership over to Junio, and its community is always busy working on new features. What do you think the most important milestones were after you have left the project?**\n\nThat's really hard for me to say, mainly because I obviously made Git work for me, and so the things *I* use have worked from pretty much Day One. 
Just as an obvious example: Making Git work on Windows was obviously a huge step for other people, but it affected *me* not at all ;)\n\nThere's obviously all the infrastructure within Git itself to make it a lot easier to use, but I think most of the big milestones have all been around people taking the Git infrastructure and building things around it. Those often end up feeding back into Git features, of course, but, at the same time, the milestone is about something external.\n\nTo give an obvious example: All the big Git hosting sites were big milestones. Making Git be distributed was what made those so much easier to do, but the *milestone* was how then the hosting made it so easy for users to use Git for various projects.\n\n**If you had the capacity to work on Git full time again, would there be anything that you would like to implement?**\n\nAbsolutely not. Git did everything I really needed from very early on – my use is actually fairly limited, and I only really care about one project.\n\nAnd I say \"absolutely not\" because I refer you to that earlier answer: I was never really interested in SCMs at all to begin with. I think a large reason for why Git ended up being so different - mostly in good ways - from other SCMs was that I approached it more like I would a distributed journaling filesystem, not really a traditional SCM.\n\n**Is there any feature or design decision in Git that you have come to regret in retrospect?**\n\nDesign decisions? No. I still think the high-level design is just very good, and you can discuss various Git concepts without ever getting into the nitty-gritty complexity of actual implementation.\n\nAnd I think that's important in a project. You need a certain high-level design principle to guide the conceptual direction of a project.\n\nSometimes people take that too far, and think that the high-level design means that the implementation must then slavishly follow some core principle. 
And that's wrong, too – the *implementation* will have lots of nasty corner cases because reality is hard and people want odd things, but there needs to be some kind of top-level design that you can point to and reason about at a high level before you get your hands dirty with the nasty reality.\n\nAnd I think Git has a good balance of that. A very straightforward object store design (call them \"structured Merkle trees\" if you are a CS person, or you might just think of them as a \"content addressable storage\" if you are a filesystem person). That core design is there – but at the same time, it's realistically just a very tiny part of the actual code. Most of the *code* is about all the things you can do with the core design, but that basic clarity of design still gives the project some kind of high-level structure.\n\nIt's the same kind of high-level structure that Unix itself had, whether you said \"everything is a file\" or you were talking about process handling. There are a few \"concepts\" that drive the design, but then 99% of the code is about the ugly harsh details of what you build on top of that to make it all useful in the real world.\n\nI have two mantras in technology: \"If I have seen further, it is by standing on the shoulders of giants\" (Newton) and \"Genius is 1% inspiration and 99% perspiration\" (Edison).\n\nBut talking about the 99% perspiration: While I am very happy with the big design, there are certainly various details that I would have done differently if I were to do Git today.\n\nBut honestly, they aren't that important. What's much more important is all the *good* details that have been done over the last two decades.\n\n**The Linux kernel has started to use Rust as a programming language for some of its subsystems. 
Do you think it makes sense to start using such newer programming languages like this in Git?**\n\nI suspect that when it comes to Git, there's less reason to try to mix languages, which is always somewhat painful.\n\nIn the kernel, the end result is one single kernel binary – even if much of it can be loaded dynamically as modules, it is still linked together into effectively one single binary.\n\nAnd that makes using multiple languages more complex. But, on the other hand, the kernel also has more reason to worry about memory safety and, thus, look at newer languages.\n\nIn Git, if somebody wants to write parts of it in Rust or another language, I suspect it makes much more sense to just go for a separate implementation rather than try to mix languages in one binary.\n\nMuch of the Git core ideas are simple enough that just having parallel implementations of the core likely isn't too painful, and then you can target particular problem spaces where a different language makes more sense.\n\nAnd we've seen that in Git already, of course: That's exactly what JGit is. The use of a different language was due to a different web-based environment where that language choice was much more natural.\n\nI know that there are already Rust implementations of some of the core Git functionality, and I think the situation is similar: I suspect they make more sense in specific situations than in some kind of overall \"let's convert things to Rust\" kind of way.\n\nSo for anybody who is interested in implementing things in Rust, I'd suggest looking for target areas where the advantages of Rust are more obvious. I don't think C has actually been all that problematic in the standard Git source base.\n\n**New version control systems are popping up every couple of years. 
Do you think that Git will stay relevant in the future?**\n\nI already mentioned the network effects in SCMs, and I think that means that to replace Git you have to be not just slightly better, you have to be enormously better. Or so compatible that you effectively are just a new implementation of Git.\n\nAnd I do think the SCM situation has changed – Git doesn't have the kinds of huge gaping fundamental problems that SCMs had before Git. So being \"enormously better\" is fairly hard.\n\nSo, yes, I would expect Git to stay relevant for the foreseeable future, with people working on improvements *around* Git rather than replacements.\n\n*Note: This interview has been edited for length and clarity.*\n\n> Take a [journey with us through Git's 20-year history](https://about.gitlab.com/blog/journey-through-gits-20-year-history/).\n\n## Learn more about Git\n\n- [What's new in Git 2.49.0?](https://about.gitlab.com/blog/whats-new-in-git-2-49-0/)  \n- [What’s new in Git 2.48.0?](https://about.gitlab.com/blog/whats-new-in-git-2-48-0/)  \n- [A beginner's guide to the Git reftable format](https://about.gitlab.com/blog/a-beginners-guide-to-the-git-reftable-format/)\n- [Git project](https://git-scm.com/)",[903,984],{"slug":1074,"featured":91,"template":788},"celebrating-gits-20th-anniversary-with-creator-linus-torvalds",{"content":1076,"config":1084},{"title":1077,"description":1078,"authors":1079,"heroImage":1057,"date":1081,"body":1082,"category":747,"tags":1083},"What's new in Git 2.49.0?","Learn about the latest version of Git, including improved performance thanks to zlib-ng, a new name-hashing algorithm, and git-backfill(1).",[1080],"Toon Claes","2025-03-14","The Git project recently released [Git 2.49.0](https://lore.kernel.org/git/xmqqfrjfilc8.fsf@gitster.g/). 
Let's look at a few notable highlights from this release, which includes contributions from GitLab's Git team and the wider Git community.\n\nWhat's covered:\n- [git-backfill(1) and the new path-walk API](#git-backfill(1)-and-the-new-path-walk-api)\n- [Introduction of zlib-ng](#introduction-of-zlib-ng)\n- [Continued iteration on Meson](#continued-iteration-on-meson)\n- [Deprecation of .git/branches/ and .git/remotes/](#deprecation-of-.gitbranches%2F-and-.git%2Fremotes%2F)\n- [Rust bindings for libgit](#rust-bindings-for-libgit)\n- [New name-hashing algorithm](#new-name-hashing-algorithm)\n- [Promisor remote capability](#promisor-remote-capability)\n- [Thin clone using `--revision`](#thin-clone-using---revision)\n\n## git-backfill(1) and the new path-walk API\n\nWhen you [`git-clone(1)`](https://git-scm.com/docs/git-clone) a Git repository,\nyou can pass it the\n[`--filter`](https://git-scm.com/docs/git-clone#Documentation/git-clone.txt-code--filterltfilter-specgtcode)\noption. Using this option allows you to create a _partial clone_. In a partial\nclone the server only sends a subset of reachable objects according to the given\nobject filter. For example, creating a clone with `--filter=blob:none` will not\nfetch any blobs (file contents) from the server and create a _blobless clone_.\n\nBlobless clones have all the reachable commits and trees, but no blobs. When you\nperform an operation like\n[`git-checkout(1)`](https://git-scm.com/docs/git-checkout), Git will download\nthe missing blobs to complete that operation. 
For some operations, like\n[`git-blame(1)`](https://git-scm.com/docs/git-blame), this might result in\ndownloading objects one by one, which will slow down the command drastically.\nThis performance degradation occurs because `git-blame(1)` must traverse the\ncommit history to identify which specific blobs it needs, then request each\nmissing blob from the server separately.\n\nIn Git 2.49, a new subcommand `git-backfill(1)` is introduced, which can be\nused to download missing blobs in a blobless partial clone.\n\nUnder the hood, the `git-backfill(1)` command leverages the new path-walk API, which is different from how Git generally iterates over commits. Rather than iterating over the commits one at a time and recursively visiting the trees and blobs associated with each commit, the path-walk API does traversal by path. For each path, it adds a list of associated tree objects to a stack. This stack is then processed in a depth-first order. So, instead of processing every object in commit `1` before moving to commit `2`, it will process all versions of file `A` across all commits before moving to file `B`. This approach greatly improves performance in scenarios where grouping by path is essential.\n\nLet me demonstrate its use by making a blobless clone of [`gitlab-org/git`](https://gitlab.com/gitlab-org/git):\n\n```shell\n$ git clone --filter=blob:none --bare --no-tags git@gitlab.com:gitlab-org/git.git\nCloning into bare repository 'git.git'...\nremote: Enumerating objects: 245904, done.\nremote: Counting objects: 100% (1736/1736), done.\nremote: Compressing objects: 100% (276/276), done.\nremote: Total 245904 (delta 1591), reused 1547 (delta 1459), pack-reused 244168 (from 1)\nReceiving objects: 100% (245904/245904), 59.35 MiB | 15.96 MiB/s, done.\nResolving deltas: 100% (161482/161482), done.\n```\n\nAbove, we use `--bare` to ensure Git doesn't need to download any blobs to check\nout an initial branch. 
We can verify this clone does not contain any blobs:\n\n```sh\n$ git cat-file --batch-all-objects --batch-check='%(objecttype)' | sort | uniq -c\n  83977 commit\n 161927 tree\n```\n\nIf you want to see the contents of a file in the repository, Git has to download it:\n\n```sh\n$ git cat-file -p HEAD:README.md\nremote: Enumerating objects: 1, done.\nremote: Total 1 (delta 0), reused 0 (delta 0), pack-reused 1 (from 1)\nReceiving objects: 100% (1/1), 1.64 KiB | 1.64 MiB/s, done.\n\n[![Build status](https://github.com/git/git/workflows/CI/badge.svg)](https://github.com/git/git/actions?query=branch%3Amaster+event%3Apush)\n\nGit - fast, scalable, distributed revision control system\n=========================================================\n\nGit is a fast, scalable, distributed revision control system with an\nunusually rich command set that provides both high-level operations\nand full access to internals.\n\n[snip]\n```\n\nAs you can see above, Git first talks to the remote repository to download the blob before\nit can display it.\n\nWhen you would like to `git-blame(1)` that file, it needs to download a lot\nmore:\n\n```sh\n$ git blame HEAD README.md\nremote: Enumerating objects: 1, done.\nremote: Counting objects: 100% (1/1), done.\nremote: Total 1 (delta 0), reused 0 (delta 0), pack-reused 0 (from 0)\nReceiving objects: 100% (1/1), 1.64 KiB | 1.64 MiB/s, done.\nremote: Enumerating objects: 1, done.\nremote: Counting objects: 100% (1/1), done.\nremote: Total 1 (delta 0), reused 0 (delta 0), pack-reused 0 (from 0)\nReceiving objects: 100% (1/1), 1.64 KiB | 1.64 MiB/s, done.\nremote: Enumerating objects: 1, done.\nremote: Counting objects: 100% (1/1), done.\nremote: Total 1 (delta 0), reused 0 (delta 0), pack-reused 0 (from 0)\nReceiving objects: 100% (1/1), 1.64 KiB | 1.64 MiB/s, done.\nremote: Enumerating objects: 1, done.\n\n[snip]\n\ndf7375d772 README.md (Ævar Arnfjörð Bjarmason 2021-11-23 17:29:09 +0100  1) [![Build 
status](https://github.com/git/git/workflows/CI/badge.svg)](https://github.com/git/git/actions?query=branch%3Amaster+event%3Apush)\n5f7864663b README.md (Johannes Schindelin \t2019-01-29 06:19:32 -0800  2)\n28513c4f56 README.md (Matthieu Moy        \t2016-02-25 09:37:29 +0100  3) Git - fast, scalable, distributed revision control system\n28513c4f56 README.md (Matthieu Moy        \t2016-02-25 09:37:29 +0100  4) =========================================================\n556b6600b2 README\t(Nicolas Pitre       \t2007-01-17 13:04:39 -0500  5)\n556b6600b2 README\t(Nicolas Pitre       \t2007-01-17 13:04:39 -0500  6) Git is a fast, scalable, distributed revision control system with an\n556b6600b2 README\t(Nicolas Pitre       \t2007-01-17 13:04:39 -0500  7) unusually rich command set that provides both high-level operations\n556b6600b2 README\t(Nicolas Pitre       \t2007-01-17 13:04:39 -0500  8) and full access to internals.\n556b6600b2 README\t(Nicolas Pitre       \t2007-01-17 13:04:39 -0500  9)\n\n[snip]\n```\n\nWe've truncated the output, but as you can see, Git goes to the server for each\nrevision of that file separately. That's really inefficient. 
With\n`git-backfill(1)` we can ask Git to download all blobs:\n\n```shell\n$ git backfill\nremote: Enumerating objects: 50711, done.\nremote: Counting objects: 100% (15438/15438), done.\nremote: Compressing objects: 100% (708/708), done.\nremote: Total 50711 (delta 15154), reused 14730 (delta 14730), pack-reused 35273 (from 1)\nReceiving objects: 100% (50711/50711), 11.62 MiB | 12.28 MiB/s, done.\nResolving deltas: 100% (49154/49154), done.\nremote: Enumerating objects: 50017, done.\nremote: Counting objects: 100% (10826/10826), done.\nremote: Compressing objects: 100% (634/634), done.\nremote: Total 50017 (delta 10580), reused 10192 (delta 10192), pack-reused 39191 (from 1)\nReceiving objects: 100% (50017/50017), 12.17 MiB | 12.33 MiB/s, done.\nResolving deltas: 100% (48301/48301), done.\nremote: Enumerating objects: 47303, done.\nremote: Counting objects: 100% (7311/7311), done.\nremote: Compressing objects: 100% (618/618), done.\nremote: Total 47303 (delta 7021), reused 6693 (delta 6693), pack-reused 39992 (from 1)\nReceiving objects: 100% (47303/47303), 40.84 MiB | 15.26 MiB/s, done.\nResolving deltas: 100% (43788/43788), done.\n```\n\nThis backfills all blobs, turning the blobless clone into a full clone:\n\n```shell\n$ git cat-file --batch-all-objects --batch-check='%(objecttype)' | sort | uniq -c\n 148031 blob\n  83977 commit\n 161927 tree\n```\n\nThis [project](https://lore.kernel.org/git/pull.1820.v3.git.1738602667.gitgitgadget@gmail.com/)\nwas led by [Derrick Stolee](https://stolee.dev/) and was merged with\n[e565f37553](https://gitlab.com/gitlab-org/git/-/commit/e565f3755342caf1d21e22359eaf09ec11d8c0ae).\n\n## Introduction of zlib-ng\n\nAll objects in the `.git/` folder are compressed by Git using [`zlib`](https://zlib.net/). `zlib` is the reference implementation for the [RFC\n1950](https://datatracker.ietf.org/doc/html/rfc1950): ZLIB Compressed Data\nFormat. 
Created in 1995, `zlib` has a long history and is incredibly\nportable, even supporting many systems that predate the Internet. Because of its\nwide support of architectures and compilers, it has limitations in what it is\ncapable of.\n\nThe fork [`zlib-ng`](https://github.com/zlib-ng/zlib-ng) was created to\naccommodate the limitations. `zlib-ng` aims to be optimized for modern\nsystems. This fork drops support for legacy systems and instead brings in\npatches for Intel optimizations, some Cloudflare optimizations, and a couple\nother smaller patches.\n\nThe `zlib-ng` library itself provides a compatibility layer for `zlib`. The\ncompatibility later allows `zlib-ng` to be a drop-in replacement for `zlib`, but\nthat layer is not available on all Linux distributions. In Git 2.49:\n\n- A compatibility layer was added to the Git project.\n- Build options were added to both to the [`Makefile`](https://gitlab.com/gitlab-org/git/-/blob/b9d6f64393275b505937a8621a6cc4875adde8e0/Makefile#L186-187)\n  and [Meson Build file](https://gitlab.com/gitlab-org/git/-/blob/b9d6f64393275b505937a8621a6cc4875adde8e0/meson.build#L795-811).\n\nThese additions make it easier to benefit from the performance improvements of\n`zlib-ng`.\n\nIn local benchmarks, we've seen a ~25% speedup when using `zlib-ng` instead of `zlib`. And we're in the process of rolling out these changes to\nGitLab.com, too.\n\nIf you want to benefit from the gains of `zlib-ng`, first verify if Git\non your machine is already using `zlib-ng` by running\n`git version --build-options`:\n\n```shell\n$ git version --build-options\ngit version 2.47.1\ncpu: x86_64\nno commit associated with this build\nsizeof-long: 8\nsizeof-size_t: 8\nshell-path: /bin/sh\nlibcurl: 8.6.0\nOpenSSL: OpenSSL 3.2.2 4 Jun 2024\nzlib: 1.3.1.zlib-ng\n```\n\nIf the last line includes `zlib-ng` then your Git is already built\nusing the faster `zlib` variant. 
If not, you can either:\n\n- Ask the maintainer of the Git package you are using to include `zlib-ng` support.\n- Build Git yourself from source.\n\nThese [changes](https://gitlab.com/gitlab-org/git/-/commit/9d0e81e2ae3bd7f6d8a655be53c2396d7af3d2b0)\nwere [introduced](https://lore.kernel.org/git/20250128-b4-pks-compat-drop-uncompress2-v4-0-129bc36ae8f5@pks.im/)\nby [Patrick Steinhardt](https://gitlab.com/pks-gitlab).\n\n## Continued iteration on Meson\n\nIn our article about the Git 2.48 release,\nwe touched on [the introduction of the Meson build system](https://about.gitlab.com/blog/whats-new-in-git-2-48-0/#meson-build-system). [Meson](https://en.wikipedia.org/wiki/Meson_(software)) is\na build automation tool used by the Git project that at some point might replace [Autoconf](https://en.wikipedia.org/wiki/Autoconf),\n[CMake](https://en.wikipedia.org/wiki/CMake), and maybe even\n[Make](https://en.wikipedia.org/wiki/Make_(software)).\n\nDuring this release cycle, work continued on using Meson, adding various missing\nfeatures and stabilization fixes:\n\n  - [Improved test coverage for\n\tCI](https://lore.kernel.org/git/20250122-b4-pks-meson-additions-v3-0-5a51eb5d3dcd@pks.im/)\n\twas merged in\n\t[72f1ddfbc9](https://gitlab.com/gitlab-org/git/-/commit/72f1ddfbc95b47c6011bb423e6947418d1d72709).\n  - [Bits and pieces to use Meson in `contrib/`](https://lore.kernel.org/git/20250219-b4-pks-meson-contrib-v2-0-1ba5d7fde0b9@pks.im/)\n\twere merged in\n\t[2a1530a953](https://gitlab.com/gitlab-org/git/-/commit/2a1530a953cc4d2ae62416db86c545c7ccb73ace).\n  - [Assorted fixes and improvements to the build procedure based on\n\tmeson](https://lore.kernel.org/git/20250226-b4-pks-meson-improvements-v3-0-60c77cf673ae@pks.im/)\n\twere merged in\n\t[ab09eddf60](https://gitlab.com/gitlab-org/git/-/commit/ab09eddf601501290b5c719574fbe6c02314631f).\n  - [Making Meson aware of 
building\n\t`git-subtree(1)`](https://lore.kernel.org/git/20250117-b4-pks-build-subtree-v1-0-03c2ed6cc42e@pks.im/)\n\twas merged in\n\t[3ddeb7f337](https://gitlab.com/gitlab-org/git/-/commit/3ddeb7f3373ae0e309d9df62ada24375afa456c7).\n  - [Learn Meson to generate HTML documentation\n\tpages](https://lore.kernel.org/git/20241227-b4-pks-meson-docs-v2-0-f61e63edbfa1@pks.im/)\n\twas merged in\n\t[1b4e9a5f8b](https://gitlab.com/gitlab-org/git/-/commit/1b4e9a5f8b5f048972c21fe8acafe0404096f694).\n\nAll these efforts were carried out by [Patrick Steinhardt](https://gitlab.com/pks-gitlab).\n\n## Deprecation of .git/branches/ and .git/remotes/\n\nYou are probably aware of the existence of the `.git` directory, and what is\ninside. But have you ever heard about the sub-directories `.git/branches/` and\n`.git/remotes/`? As you might know, reference to branches are stored in\n`.git/refs/heads/`, so that's not what `.git/branches/` is for, and what about\n`.git/remotes/`?\n\nWay back in 2005, [`.git/branches/`](https://git-scm.com/docs/git-fetch#_named_file_in_git_dirbranches)\nwas introduced to store a shorthand name for a remote, and a few months later they were\nmoved to [`.git/remotes/`](https://git-scm.com/docs/git-fetch#_named_file_in_git_dirremotes).\nIn [2006](https://lore.kernel.org/git/Pine.LNX.4.63.0604301520460.2646@wbgn013.biozentrum.uni-wuerzburg.de/),\n[`git-config(1)`](https://git-scm.com/docs/git-config) learned to store\n[remotes](https://git-scm.com/docs/git-config#Documentation/git-config.txt-remoteltnamegturl).\nThis has become the standard way to configure remotes and, in 2011, the\ndirectories `.git/branches/` and `.git/remotes/` were\n[documented](https://gitlab.com/git-scm/git/-/commit/3d3d282146e13f2d7f055ad056956fd8e5d7ed29#e615263aaf131d42be8b0d0888ebd3fec954c6c9_132_124)\nas being \"legacy\" and no longer used in modern repositories.\n\nIn 2024, the document [BreakingChanges](https://git-scm.com/docs/BreakingChanges)\nwas started to outline breaking 
changes for the next major version of Git\n(v3.0). While this release is not planned to happen any time soon, this document\nkeeps track of changes that are expected to be part of that release.\nIn [8ccc75c245](https://gitlab.com/git-scm/git/-/commit/8ccc75c2452b5814d2445d60d54266293ca48674),\nthe use of the directories `.git/branches/` and `.git/remotes/` was added to\nthis document and that officially marks as them deprecated and to be removed in\nGit 3.0.\n\nThanks to [Patrick Steinhardt](https://gitlab.com/pks-gitlab) for\n[formalizing this deprecation](https://lore.kernel.org/git/20250122-pks-remote-branches-deprecation-v4-5-5cbf5b28afd5@pks.im/).\n\n## Rust bindings for libgit\n\nWhen compiling Git, an internal library `libgit.a` is made. This library\ncontains some of the core functionality of Git.\n\nWhile this library (and most of Git) is written in C, in Git 2.49 bindings were\nadded to make some of these functions available in Rust. To achieve this, two\nnew Cargo packages were created: `libgit-sys` and `libgit-rs`. These packages\nlive in the [`contrib/`](https://gitlab.com/gitlab-org/git/-/tree/master/contrib) subdirectory in the Git source tree.\n\nIt's pretty\n[common](https://doc.rust-lang.org/cargo/reference/build-scripts.html#-sys-packages)\nto split out a library into two packages when a [Foreign Function\nInterface](https://en.wikipedia.org/wiki/Foreign_function_interface) is used.\nThe `libgit-sys` package provides the pure interface to C functions and links to\nthe native `libgit.a` library. The package `libgit-rs` provides a high-level\ninterface to the functions in `libgit-sys` with a feel that is more idiomatic to\nRust.\n\nSo far, the functionality in these Rust packages is very limited. 
It only\nprovides an interface to interact with the `git-config(1)`.\n\nThis initiative was led by [Josh Steadmon](https://lore.kernel.org/git/8793ff64a7f6c4c04dd03b71162a85849feda944.1738187176.git.steadmon@google.com/) and was merged with [a4af0b6288](https://gitlab.com/gitlab-org/git/-/commit/a4af0b6288e25eb327ae9018cee09def9e43f1cd).\n\n## New name-hashing algorithm\n\nThe Git object database in `.git/` stores most of its data in packfiles. And\npackfiles are also used to submit objects between Git server and client over the\nwire.\n\nYou can read all about the format at\n[`gitformat-pack(5)`](https://git-scm.com/docs/gitformat-pack). One important\naspect of the packfiles is delta-compression. With delta-compression not every\nobject is stored as-is, but some objects are saved as a _delta_ of another\n_base_. So instead of saving the full contents of the objects, changes compared\nto another object are stored.\n\nWithout going into the details how these deltas are calculated or stored, you\ncan imagine that it is important group files together that are very similar. In\nv2.48 and earlier, Git looked at the last 16 characters of the path name to\ndetermine whether blobs might be similar. This algorithm is named version `1`.\n\nIn Git 2.49, version `2` is available. This is an iteration on version `1`, but\nmodified so the effect of the parent directory is reduced. 
You can specify the\nname-hash algorithm version you want to use with option `--name-hash-version` of\n[`git-repack(1)`](https://git-scm.com/docs/git-repack).\n\n[Derrick Stolee](https://stolee.dev/), who drove this project, did some\ncomparison in resulting packfile size after running `git repack -adf\n--name-hash-version=\u003Cn>`:\n\n| Repo                                          \t| Version 1 size   | Version 2 size |\n|---------------------------------------------------|-----------|---------|\n| [fluentui](https://github.com/microsoft/fluentui) | 440 MB \t| 161 MB   |\n| Repo B                                        \t| 6,248 MB   | 856 MB   |\n| Repo C                                        \t| 37,278 MB  | 6,921 MB |\n| Repo D                                        \t| 131,204 MB | 7,463 MB |\n\nYou can read more of the details in the [patch\nset](https://lore.kernel.org/git/pull.1823.v4.git.1738004554.gitgitgadget@gmail.com/),\nwhich is merged in\n[aae91a86fb](https://gitlab.com/gitlab-org/git/-/commit/aae91a86fb2a71ff89a71b63ccec3a947b26ca51).\n\n## Promisor remote capability\n\nIt's known that Git isn't great in dealing with large files. There are some\nsolutions to this problem, like [Git LFS](https://git-lfs.com/), but there are\nstill some shortcomings. To give a few:\n\n- With Git LFS the user has to configure which files to put in LFS. The server has\n  no control about that and has to serve all files.\n- Whenever a file is committed to the repository, there is no way to get it out\n  again without rewriting history. This is annoying, especially for large files,\n  because they are stuck for eternity.\n- Users cannot change their mind on which files to put into Git LFS.\n- A tool like Git LFS requires significant effort to set up, learn, and use\n  correctly.\n\nFor some time, Git has had the concept of promisor remotes. 
This feature can be used to deal with large files, and in Git 2.49 this feature took a step forward.\n\nThe idea for the new “promisor-remote” capability is relatively simple: Instead of sending all\nobjects itself, a Git server can tell to the Git client \"Hey, go download these\nobjects from _XYZ_\". _XYZ_ would be a promisor remote.\n\nGit 2.49 enables the server to advertise the information of the promisor remote\nto the client. This change is an extension to\n[`gitprotocol-v2`](https://git-scm.com/docs/gitprotocol-v2). While the server\nand the client are transmitting data to each other, the server can send  names and URLs of the promisor remotes it knows\nabout.\n\nSo far, the client is not using the promisor remote info it gets from the server during clone, so all\nobjects are still transmitted from the remote the clone initiated from. We are planning to continue work on this feature, making it use promisor remote info from the server, and making it easier to use.\n\nThis [patch\nset](https://lore.kernel.org/git/20250218113204.2847463-1-christian.couder@gmail.com/)\nwas submitted by [Christian Couder](https://gitlab.com/chriscool) and merged\nwith\n[2c6fd30198](https://gitlab.com/gitlab-org/git/-/commit/2c6fd30198187c928cbf927802556908c381799c).\n\n## Thin clone using `--revision`\n\nA new `--revision` option was added to\n[`git-clone(1)`](https://git-scm.com/docs/git-clone). This enables you to create\na thin clone of a repository that only contains the history of the given\nrevision. The option is similar to `--branch`, but accepts a ref name (like\n`refs/heads/main`, `refs/tags/v1.0`, and `refs/merge-requests/123`) or a\nhexadecimal commit object ID. The difference to `--branch` is that it does not\ncreate a tracking branch and detaches `HEAD`. This means it's not suited if you\nwant to contribute back to that branch.\n\nYou can use `--revision` in combination with `--depth` to create a very minimal\nclone. A suggested use-case is for automated testing. 
When you have a CI system\nthat needs to check out a branch (or any reference) to perform autonomous\ntesting on the source code, having a minimal clone is all you need.\n\nThis\n[change](https://gitlab.com/gitlab-org/git/-/commit/5785d9143bcb3ef19452a83bc2e870ff3d5ed95a)\nwas\n[driven](https://lore.kernel.org/git/20250206-toon-clone-refs-v7-0-4622b7392202@iotcl.com/)\nby [Toon Claes](https://gitlab.com/toon).\n\n# Read more\n- [What’s new in Git 2.48.0?](https://about.gitlab.com/blog/whats-new-in-git-2-48-0/)\n- [What’s new in Git 2.47.0?](https://about.gitlab.com/blog/whats-new-in-git-2-47-0/)\n- [What’s new in Git 2.46.0?](https://about.gitlab.com/blog/whats-new-in-git-2-46-0/)",[267,903,984],{"slug":1085,"featured":91,"template":788},"whats-new-in-git-2-49-0",{"category":90,"slug":757,"posts":1087},[1088,1100,1113],{"content":1089,"config":1098},{"title":1090,"description":1091,"heroImage":1092,"date":1093,"body":1094,"category":757,"tags":1095,"authors":1096},"Reduce the load on GitLab Gitaly with bundle URI","Discover what the bundle URI Git feature is, how it is integrated into Gitaly, configuration best practices, and how GitLab users can benefit from it.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099013/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%2814%29_6VTUA8mUhOZNDaRVNPeKwl_1750099012960.png","2025-06-24","Gitaly plays a vital role in the GitLab ecosystem — it is the server\ncomponent that handles all Git operations. Every push and pull made to/from\na repository is handled by Gitaly, which has direct access to the disk where\nthe actual repositories are stored. As a result, when Gitaly is under heavy\nload, some operations like CI/CD pipelines and browsing a repository in the\nGitLab UI can become quite slow. 
This is particularly true when serving\nclones and fetches for large and busy monorepos, which can consume large\namounts of CPU and memory.\n\n\n[Bundle URI](https://docs.gitlab.com/administration/gitaly/bundle_uris/) takes significant load off of Gitaly servers during clones by allowing Git to pre-download a bundled repository from object storage before calling the Gitaly servers to fetch the remaining objects.\n\n\nHere is a graph that shows the difference between clones without and with bundle URI.\n\n\n![Graph that shows the difference between clones without and with bundle URI](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750705069/rvbm4ru1w58msd6zv4x7.png)\n\n\nThis graph shows the results of a small test we ran on an isolated GitLab installation, with Gitaly running on a machine with 2 CPUs. We wanted to test bundle URI with a large repository, so we pushed the [GitLab repository](https://gitlab.com/gitlab-org/gitlab) to the instance. We also generated a bundle beforehand.\n\n\nThe big CPU spike is from when we performed a single clone of the GitLab repository with bundle URI disabled. It's quite noticeable. A little later, we turned on bundle URI and launched three concurrent clones of the GitLab repository. Sure enough, turning on bundle URI provides massive performance gain. We can't even distinguish the CPU usage of the three clones from normal usage.\n\n\n## Configure Gitaly to use bundle URI\n\n\nTo enable bundle URI on your GitLab installation, there are a couple of things you need to configure.\n\n\n### Create a cloud bucket\n\n\nBundles need to be stored somewhere. The ideal place is in a cloud storage bucket. Gitaly uses the [gocloud.dev](https://pkg.go.dev/gocloud.dev) library to read and write from cloud storage. Any cloud storage solution supported by this library can be used. 
Once you have a cloud bucket URL, you can add it in the Gitaly configuration here:\n\n\n```toml\n[bundle_uri]\ngo_cloud_url = \"\u003Cbucket-uri>\"\n```\n\n\nIt must be noted that Gitaly does not manage the lifecycle of the bundles stored in the bucket. To avoid cost issues, object lifecycle policies must be enabled on the bucket in order to delete unused or old objects.\n\n\n### Enable the feature flags\n\n\nThere are two feature flags to enable:\n\n\n- `gitaly_bundle_generation` enables [auto-generation](#auto-generated) of bundles.\n\n\n- `gitaly_bundle_uri` makes Gitaly advertise bundle URIs when they are available (either manually created or auto-generated) and allows the user to [manually](#manual) generate bundles.\n\n\nThese feature flags can be enabled at-large on a GitLab installation, or per repository. See the [documentation on how to enable a GitLab feature behind a feature flag](https://docs.gitlab.com/administration/feature_flags/#how-to-enable-and-disable-features-behind-flags).\n\n\n### How to generate bundles\n\n\nGitaly offers two ways for users to use bundle URI: a [manual](#manual) way and an [auto-generated](#auto-generated) way.\n\n\n#### Manual\n\n\nIt is possible to create a bundle manually by connecting over SSH with the Gitaly node that stores the repository you want to create a bundle for, and run the following command:\n\n```shell\nsudo -u git -- /opt/gitlab/embedded/bin/gitaly bundle-uri \n--config=\u003Cconfig-file>\n--storage=\u003Cstorage-name>\n--repository=\u003Crelative-path>\n```\n\nThis command will create a bundle for the given repository and store it into the bucket configured above. 
When a subsequent `git clone` request will reach Gitaly for the same repository, the bundle URI mechanism described above will come into play.\n\n\n#### Auto-generated\n\n\nGitaly can also generate bundles automatically, using a heuristic to determine if it is currently handling frequent clones for the same repository.\n\n\nThe current heuristic keeps track of the number of times a `git fetch` request is issued for each repository. If the number of requests reaches a certain `threshold` in a given time `interval`, a bundle is automatically generated. Gitaly also keeps track of the last time it generated a bundle for a repository. When a new bundle should be regenerated, based on the `threshold` and `interval`, Gitaly looks at the last time a bundle was generated for the given repository. It will only generate a new bundle if the existing bundle is older than some `maxBundleAge` configuration. The old bundle is overwritten. There can only be one bundle per repository in cloud storage.\n\n\n## Using bundle URI\n\n\nWhen a bundle exists for a repository, it can be used by the `git clone` command.\n\n\n### Cloning from your terminal\n\n\nTo clone a repository from your terminal, make sure your Git configuration enables bundle URI. The configuration can be set like so:\n\n\n```shell\ngit config --global transfer.bundleuri true\n```\n\nTo verify that bundle URI is used during a clone, you can run the `git clone` command with `GIT_TRACE=1` and see if your bundle is being downloaded:\n```shell\n➜  GIT_TRACE=1 git clone https://gitlab.com/gitlab-org/gitaly\n...\n14:31:42.374912 run-command.c:667       trace: run_command: git-remote-https '\u003Cbundle-uri>'\n...\n```\n\n### Cloning during CI/CD pipelines\n\n\nOne scenario where using bundle URI would be beneficial is during a CI/CD pipeline, where each job needs a copy of the repository in order to run. 
Cloning a repository during a CI/CD pipeline is the same as cloning a repository from your terminal, except that the Git client in this case is the GitLab Runner. Thus, we need to configure the GitLab Runner in such a way that it can use bundle URI.\n\n\n**1. Update the helper-image**\n\n\nThe first thing to do to configure the GitLab Runner is to [overwrite the helper-image](https://docs.gitlab.com/runner/configuration/advanced-configuration/#override-the-helper-image) that your GitLab Runner instances use. The `helper-image` is the image that is used to run the process of cloning a repository before the job starts. To use bundle URI, the image needs the following:\n\n\n- Git Version 2.49.0 or later\n\n\n- [`GitLab Runner helper`](https://gitlab.com/gitlab-org/gitlab-runner/-/tree/main/apps/gitlab-runner-helper?ref_type=heads) Version 18.1.0 or later\n\n\nThe helper-images can be found [here](https://gitlab.com/gitlab-org/gitlab-runner/container_registry/1472754?orderBy=PUBLISHED_AT&sort=desc&search[]=v18.1.0). 
Select an image that corresponds to the OS distribution and the architecture you use for your GitLab Runner instances, and verify that the image satisfies the requirements.\n\n\nAt the time of writing, the `alpine-edge-\u003Carch>-v18.1.0*` tag meets all requirements.\n\nYou can validate the image meets all requirements with:\n\n```shell\ndocker run -it \u003Cimage:tag>\n$ git version ## must be 2.49.0 or newer\n$ gitlab-runner-helper -v ## must be 18.0 or newer\n```\n\nIf you do not find an image that meets the requirements, you can also use the helper-image as a base image and install the requirements yourself in a custom-built image that you can host on [GitLab Container Registry](https://docs.gitlab.com/user/packages/container_registry/).\n\n\nOnce you have found the image you need, you must configure your GitLab Runner instances to use it by updating your `config.toml` file:\n\n\n```toml\n[[runners]]\n (...)\n executor = \"docker\"\n [runners.docker]\n    (...)\n    helper_image = \"image:tag\" ## \u003C-- put the image name and tag here\n```\n\n\nOnce the configuration is changed, you must restart the runners for the new configuration to take effect.\n\n\n**2. Turn on the feature flag**\n\n\nNext, you must enable the `FF_USE_GIT_NATIVE_CLONE` [GitLab Runner feature flags](https://docs.gitlab.com/runner/configuration/feature-flags/) in your `.gitlab-ci.yml` file. To do that, simply add it as a variable and set to `true` :\n\n```yaml\nvariables:\n  FF_USE_GIT_NATIVE_CLONE: \"true\"\n```\n\n\nThe `GIT_STRATEGY` must also be [set to `clone`](\u003Chttps://docs.gitlab.com/ci/runners/configure_runners/#git-strategy>), as Git bundle URI only works with `clone` commands.\n\n\n## How bundle URI works\n\n\nWhen a user clones a repository with the `git clone` command, a process called [`git-receive-pack`](https://git-scm.com/docs/git-receive-pack) is launched on the client's machine. 
This process communicates with the remote repository's server (it can be over HTTP/S, SSH, etc.) and asks to start a [`git-upload-pack`](https://git-scm.com/docs/git-upload-pack) process. Those two processes then exchange information using the Git protocol (it must be noted that bundle URI is only supported with [Git protocol v2](https://git-scm.com/docs/protocol-v2)). The capabilities both processes support and the references and objects the client needs are among the information exchanged. Once the Git server has determined which objects to send to the client, it must package them into a packfile, which, depending on the size of the data it must process, can consume a good amount of resources.\n\n\nWhere does bundle URI fit into this interaction? If bundle URI is advertised as a capability from the `upload-pack` process and the client supports bundle URI, the Git client will ask the server if it knows about any bundle URIs. The server sends those URIs back and the client downloads those bundles.\n\n\nHere is a diagram that shows those interactions:\n\n\n```mermaid\n\nsequenceDiagram\n\n\n    participant receive as Client\n\n\n    participant upload as Server\n\n\n    participant cloud as File server\n\n\n    receive ->> upload: issue git-upload-pack\n\n\n    upload -->> receive: list of server capabilities\n\n\n    opt if bundle URI is advertised as a capability\n\n\n    receive ->> upload: request bundle URI\n\n\n    upload -->> receive: bundle URI\n\n\n    receive ->> cloud: download bundle at URI\n\n\n    cloud -->> receive: bundle file\n\n\n    receive ->> receive: clone from bundle\n\n\n    end\n\n\n    receive ->> upload: requests missing references and objects\n\n\n    upload -->> receive: packfile data\n\n```\n\n\nAs such, Git [bundle URI](https://git-scm.com/docs/bundle-uri) is a mechanism by which, during a `git clone`, a Git server can advertise the URI of a bundle for the repository being cloned by the Git client. 
When that is the case, the Git client can clone the repository from the bundle and request from the Git server only the missing references or objects that were not part of the bundle. This mechanism really helps to alleviate pressure from the Git server.\n\n\n## Alternatives\n\n\nGitLab also has a feature [Pack-objects cache](https://docs.gitlab.com/administration/gitaly/configure_gitaly/#pack-objects-cache). This feature works slightly differently than bundle URI. When the server packs objects together into a so-called packfile, this feature will keep that file in the cache. When another client needs the same set of objects, it doesn't need to repack them, but it can just send the same packfile again.\n\n\nThe feature is only beneficial when many clients request the exact same set of objects. In a repository that is quick-changing, this feature might not give any improvements. With bundle URI, it doesn't matter if the bundle is slightly out-of-date because the client can request missing objects after downloading the bundle and apply those changes on top. 
Also bundle URI in Gitaly stores the bundles on external storage, whereas the Pack-objects Cache stores them on the Gitaly node, so using the latter doesn't reduce network and I/O load on the Gitaly server.\n\n\n## Try bundle URI today\n\n\nYou can try the bundle URI feature in one of the following ways:\n\n\n* Download a [free, 60-day trial version of GitLab Ultimate](https://about.gitlab.com/free-trial/).\n\n\n* If you already run a self-hosted GitLab installation, upgrade to 18.1.\n\n\n* If you can't upgrade to 18.1 at this time, [download GitLab](https://about.gitlab.com/install/) to a local machine.",[757,709,984],[1097],"Olivier Campeau",{"featured":6,"template":788,"slug":1099},"reduce-the-load-on-gitlab-gitaly-with-bundle-uri",{"content":1101,"config":1111},{"title":1102,"description":1103,"body":1104,"category":757,"tags":1105,"authors":1106},"GitLab Ultimate for IBM Z: Modern DevSecOps for mainframes","A new offering from GitLab and IBM bridges mainframe and cloud-native development with seamless integration, CI/CD runner support, end-to-end visibility, and cost efficiency. ","GitLab and IBM have partnered to solve a fundamental disconnect in enterprise development: enabling mainframe developers to work with the same modern tools, workflows, and collaboration features as their distributed counterparts. GitLab Ultimate for IBM Z, a GitLab-certified, integrated DevSecOps solution tailored for the mainframe environment, does just that — allowing organizations to modernize their mainframe development workflows by facilitating a seamless migration from outdated legacy library managers. With CI/CD pipelines running natively on IBM z/OS, customers experience accelerated innovation and reduced operational costs.\n\n## Challenges of today's mainframe development\n\nEnterprise organizations that use IBM Z systems for mission-critical workloads face challenges that conventional DevSecOps tools aren’t equipped to address. 
Cloud-native teams benefit from modern [CI/CD](https://about.gitlab.com/topics/ci-cd/) pipelines, collaborative development, and automated testing. In contrast, mainframe teams are often left behind — stuck with outdated tools that lead to costly inefficiencies and operational silos.\n\nTeams often resort to workarounds, such as SSH connections and manual file transfers, which create security vulnerabilities and audit difficulties. When compliance requirements are stringent, these improvised solutions become unacceptable risks. Meanwhile, organizations maintain expensive parallel toolchains, with legacy mainframe development tools carrying premium licensing costs while delivering limited functionality compared to modern alternatives.\n\nThis fragmentation creates two problems: slower delivery cycles and difficulty attracting developers who expect modern development experiences.\n\n> **\"GitLab Ultimate for IBM Z represents an important step in addressing a long-standing industry challenge. IDC research shows that mainframe developers often work with legacy tooling that contributes to delivery inefficiencies and makes it harder to attract new talent. With this offering, modern DevSecOps capabilities and unified workflows are brought directly to the mainframe. This empowers developers to work more collaboratively and efficiently, while helping organizations accelerate innovation and integrate mainframe development into broader digital transformation strategies.\"** - Katie Norton, Research Manager, DevSecOps and Software Supply Chain Security at IDC\n\n## Unified development environments\n\nTrue modernization means more than just updating mainframe development. 
It means creating a unified platform where mainframe, cloud-native, web, and mobile development teams collaborate seamlessly.\n\nGitLab Ultimate for IBM Z enables developers to use consistent workflows whether they're deploying to z/OS, cloud, or on-premises infrastructure — knowledge transfers between teams instead of staying siloed. Organizations can modernize incrementally without business disruption, as legacy systems continue operating while teams adopt modern practices at their own pace.\n\nAs organizations pursue hybrid cloud strategies, GitLab provides the foundation for applications that span mainframe and cloud-native environments.\n\n## What is GitLab Ultimate for IBM Z?\n\nGitLab Ultimate for IBM Z delivers native z/OS Runner support, enabling seamless CI/CD pipeline execution directly on your mainframe infrastructure. This GitLab-certified solution helps eliminate the need for complex workarounds while maintaining the security and reliability your enterprise applications demand.\n\nThe combination of GitLab's comprehensive DevSecOps platform with IBM's deep mainframe expertise creates something unique in the market: a certified solution that provides a true bridge between enterprise legacy systems and cloud-native innovation.\n\n## GitLab Ultimate for IBM Z capabilities\n\nGitLab Ultimate for IBM Z provides enterprise teams with the tools they need to modernize mainframe development while preserving critical business systems.\n\n**Native z/OS Runner support** helps eliminate security risks and scalability bottlenecks associated with remote connections, while accelerating delivery through CI/CD pipelines that execute directly where your mainframe code resides.\n\n**Unified Source Code Management** modernizes your toolchain by replacing expensive legacy library managers with GitLab's searchable, version-controlled repository system, helping reduce licensing costs and maintenance overhead.\n\n**Seamless integration** with IBM Developer for z/OS Enterprise 
Edition (IDzEE) delivers faster software releases through dependency-based builds, automated code scanning, and comprehensive debugging tools within familiar developer environments, enhancing both quality and security.\n\n**End-to-end visibility** across mainframe and distributed environments provides comprehensive project management from planning to production, enabling automated DevOps workflows that help retain talent through modern, next-generation development tools.\n\n## Modernize your mainframe development environment today\n\nGitLab Ultimate for IBM Z is available now for organizations ready to transform their mainframe development experience. To learn more, visit the [GitLab and IBM partnership page](https://about.gitlab.com/partners/technology-partners/ibm/).",[282,757,109,709],[1107,1108],"Mike Flouton","Andy Bradfield","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750440008/myqt5vcjlffh8sszw507.png","2025-06-23",{"featured":91,"template":788,"slug":1112},"gitlab-ultimate-for-ibm-z-modern-devsecops-for-mainframes",{"content":1114,"config":1123},{"heroImage":1115,"title":1116,"description":1117,"authors":1118,"date":1119,"body":1120,"category":757,"tags":1121},"https://res.cloudinary.com/about-gitlab-com/image/upload/v1750333905/erq4ak30f6zxjjcsmt4y.png","GitLab 18.1 released","This release includes Maven virtual registry (beta), Duo Code Review, compromised password detection, and SLSA Level 1 with components.",[872],"2025-06-19","This is the article for [GitLab 18.1 release](https://about.gitlab.com/releases/2025/06/19/gitlab-18-1-released/).",[1122,757],"releases",{"featured":91,"template":788,"slug":1124,"externalUrl":1125},"gitlab-18-1-released","https://about.gitlab.com/releases/2025/06/19/gitlab-18-1-released/",{"category":125,"slug":767,"posts":1127},[1128,1142,1153],{"content":1129,"config":1140},{"title":1130,"description":1131,"heroImage":1132,"date":1133,"body":1134,"category":767,"tags":1135,"authors":1136},"Automating 
role-based access control (RBAC) at scale","This guide details setting up GitLab + Keycloak + OIDC for RBAC, covering planning, Docker configuration, and automated access governance for DevSecOps.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659561/Blog/Hero%20Images/securitycheck.png","2025-06-20","Security starts with structure. Building a scalable and secure development platform begins with getting the fundamentals right — especially role-based access control (RBAC).\n\nTo help our customers scale effectively, we developed the RBAC Accelerator — a modular, outcome-driven enablement program that supports large organizations in defining, enforcing, and scaling access policies across GitLab.\n\nThis foundation enables broader transformation. For example, the Secure SDLC Accelerator, built on top of the RBAC Accelerator, empowers customers to integrate compliance, security, and DevSecOps best practices into their workflows.\n\nGitLab customer [Lely](https://www.lelyna.com/us/), a major Dutch manufacturer of agricultural machines and robots, used this approach to migrate to GitLab Dedicated. 
Lely automated user provisioning via Azure AD using OpenID Connect (OIDC), enforced [least-privilege policies](https://about.gitlab.com/blog/the-ultimate-guide-to-least-privilege-access-with-gitlab/), and created a scalable, reusable access model to support their future development initiatives.\n\nIn this guide, we’ll take you through a hands-on implementation example of GitLab + [Keycloak](https://www.keycloak.org/) + OIDC, covering everything from running the setup in a Docker environment to automating role mapping, designing a scalable group hierarchy, and aligning GitLab access controls with organizational structure and compliance goals.\n\nThis is a local demo setup intended for proof-of-concept purposes only.\n\nWhether you’re just starting out or optimizing at scale, this modular foundation ensures you’re not just securing access — you’re enabling everything that comes next.\n\n## Getting started with access control planning\n\nBefore implementing any tooling, it’s essential to understand your access landscape. 
\n\nConsider:\n- What GitLab resources need protection (projects, groups, environments)?\n- Who are your personas (Developers, Maintainers, Guests, etc.)?\n- What organizational units (departments, cost centers) should govern access?\n- How does your IdP structure (Keycloak) define users and roles?\n\nUse this stage to draft your:\n- Access control matrix\n- GitLab group hierarchy (team- or product-based)\n- Least privilege policy assumptions\n\nSample group hierarchy \n\n```mermaid\ngraph TD\n    Root[\"Root (Root Group)\"]\n    FirmwareTeam[\"Firmware-Team\"]\n    FirmwareDevelopers[\"Developers (GitLab Developer Role)\"]\n    FirmwareMaintainers[\"Maintainers (GitLab Maintainer Role)\"]\n    FirmwareReporters[\"Reporters (GitLab Reporter Role)\"]\n    HardwareTeam[\"Hardware-Team\"]\n    HardwareDevelopers[\"Developers\"]\n    SoftwareTeam[\"Software-Team\"]\n    SoftwareDevelopers[\"Developers\"]\n    SoftwareMaintainers[\"Maintainers\"]\n    SoftwareReporters[\"Reporters\"]\n    \n    Root --> FirmwareTeam\n    Root --> HardwareTeam\n    Root --> SoftwareTeam\n    \n    FirmwareTeam --> FirmwareDevelopers\n    FirmwareTeam --> FirmwareMaintainers\n    FirmwareTeam --> FirmwareReporters\n    \n    HardwareTeam --> HardwareDevelopers\n    \n    SoftwareTeam --> SoftwareDevelopers\n    SoftwareTeam --> SoftwareMaintainers\n    SoftwareTeam --> SoftwareReporters\n```\n\n## Demo system setup: GitLab + Keycloak in a local Docker environment\n### Prerequisites\n\n- Docker, Docker Compose, OpenSSL\n- GitLab Version 17.7.3 and Keycloak Version 23.0.7 container images\n- Self-signed certificates\n\n### .env configuration\n\nThe demo setup is using the following GitLab and Keycloak versions, ports and secrets.\n\n#### GitLab configuration\n\n```bash\nGITLAB_VERSION=17.7.3-ee.0\nGITLAB_EXTERNAL_URL=http://localhost:8081\nGITLAB_SSH_PORT=8222\n```\n\n####  Keycloak 
configuration\n\n```bash\nKEYCLOAK_VERSION=latest\nKEYCLOAK_ADMIN=\u003Cyour-admin-username>\nKEYCLOAK_ADMIN_PASSWORD=\u003Cyour-admin-password>\nKEYCLOAK_HTTPS_PORT=8443\nKEYCLOAK_CLIENT_SECRET=\u003Cyour-client-secret>  # Get this from Keycloak after setup\n```\n\n## Generate SSL certificates\n\nTo establish trust between GitLab and Keycloak, especially in a self-hosted Docker environment, we’ll need to generate self-signed SSL certificates. These certificates will enable encrypted HTTPS communication and ensure GitLab can securely talk to Keycloak during the OIDC authentication process.\n\nFor production environments, we recommend using certificates from a trusted Certificate Authority (CA), but for local testing and development, self-signed certificates are sufficient.\n\nFollow these step-by-step instructions:\n\n1. Create a folder for the certificates.\n\n\n``` mkdir -p certs```\n\n2. Generate a self-signed certificate with OpenSSL.\n\n```bash\nopenssl req -x509 -nodes -days 365 -newkey rsa:2048 \\\n  -keyout certs/tls.key \\\n  -out certs/tls.crt \\\n  -subj \"/CN=keycloak\" \\\n  -addext \"subjectAltName=DNS:keycloak,DNS:localhost\"\n```\n\n3. 
Create a PKCS12 keystore for Keycloak.\n\n\n```bash\nopenssl pkcs12 -export \\\n  -in certs/tls.crt \\\n  -inkey certs/tls.key \\\n  -out certs/keystore.p12 \\\n  -name keycloak \\\n  -password pass:password\n```\n\n## Start the service using Docker compose\n\nNow that we have our certificates, we can stand up our local GitLab + Keycloak environment using Docker Compose:\n\n\n```yaml\nversion: '3.8'\nservices:\n  gitlab:\n    image: gitlab/gitlab-ee:${GITLAB_VERSION}\n    container_name: gitlab\n    restart: unless-stopped\n    environment:\n      GITLAB_OMNIBUS_CONFIG: |\n        external_url '${GITLAB_EXTERNAL_URL:-http://localhost:8081}'\n        gitlab_rails['gitlab_shell_ssh_port'] = ${GITLAB_SSH_PORT:-8222}\n        gitlab_rails['display_initial_root_password'] = true\n\n        # OAuth Configuration\n        gitlab_rails['omniauth_enabled'] = true\n        gitlab_rails['omniauth_allow_single_sign_on'] = ['openid_connect']\n        gitlab_rails['omniauth_block_auto_created_users'] = false\n        gitlab_rails['omniauth_providers'] = [\n            {\n                'name' => 'openid_connect',\n                'label' => 'Keycloak',\n                'args' => {\n                    'name' => 'openid_connect',\n                    'scope' => ['openid', 'profile', 'email'],\n                    'response_type' => 'code',\n                    'issuer' => 'https://localhost:8443/realms/GitLab',\n                    'client_auth_method' => 'query',\n                    'discovery' => false,\n                    'uid_field' => 'preferred_username',\n                    'pkce' => true,\n                    'client_options' => {\n                        'identifier' => 'gitlab',\n                        'secret' => '${KEYCLOAK_CLIENT_SECRET}',\n                        'redirect_uri' => '${GITLAB_EXTERNAL_URL:-http://localhost:8081}/users/auth/openid_connect/callback',\n                        'authorization_endpoint' => 
'https://localhost:8443/realms/GitLab/protocol/openid-connect/auth',\n                        'token_endpoint' => 'https://keycloak:8443/realms/GitLab/protocol/openid-connect/token',\n                        'userinfo_endpoint' => 'https://keycloak:8443/realms/GitLab/protocol/openid-connect/userinfo',\n                        'jwks_uri' => 'https://keycloak:8443/realms/GitLab/protocol/openid-connect/certs'\n                    }\n                }\n            }\n        ]\n    volumes:\n      - gl-config:/etc/gitlab\n      - gl-data:/var/opt/gitlab\n      - ./certs/tls.crt:/etc/gitlab/trusted-certs/keycloak.crt\n    ports:\n      - '${GITLAB_EXTERNAL_PORT:-8081}:8081'\n      - '${GITLAB_SSH_PORT:-8222}:22'\n    shm_size: '256m'\n\n  keycloak:\n    image: quay.io/keycloak/keycloak:${KEYCLOAK_VERSION}\n    container_name: keycloak-server\n    restart: unless-stopped\n    command: [\n      \"start-dev\",\n      \"--import-realm\",\n      \"--https-port=${KEYCLOAK_HTTPS_PORT}\",\n      \"--https-key-store-file=/etc/x509/https/keystore.p12\",\n      \"--https-key-store-password=password\"\n    ]\n    volumes:\n      - ./data:/opt/keycloak/data/import\n      - ./certs:/etc/x509/https\n    environment:\n      KEYCLOAK_ADMIN: ${KEYCLOAK_ADMIN}\n      KEYCLOAK_ADMIN_PASSWORD: ${KEYCLOAK_ADMIN_PASSWORD}\n    ports:\n      - \"${KEYCLOAK_HTTPS_PORT}:8443\"\n\nvolumes:\n  gl-config:\n  gl-data:\n```\n\n  \nRun the `docker-compose up -d` command and your GitLab + Keycloak environment will be up in minutes.\n\n\n```\ndocker-compose up -d\n```\n\n## Keycloak realm configuration\n\nYour Keycloak realm is automatically configured on startup as it's defined in the `docker-compose` file.\n\nThe realm configuration will include:\n\n- Pre-configured GitLab client\n- Default client secret\n\nYou can access Keycloak admin console at `https://localhost:8443` with:\n\n- Username: admin\n- Password: from your `.env` file\n- To verify the setup:\n  - Log into Keycloak admin console\n  - 
Select the **GitLab** realm\n  - Check **Clients > gitlab**\n\n\nVerify the client configuration matches your environment.\n\nTo showcase the automated RBAC mechanism, you will need to follow these steps:\n\n- Map realm roles to GitLab roles\n- Create group structure with mapping roles, matching the Group, Sub-group, Project pattern in GitLab.\n\nBefore provisioning your first users to the user groups, it’s recommended to log into your GitLab instance to retrieve your instance root password:\n\n1. Access GitLab at `http://localhost:8081`.\n\n2. Get the root password:\n\n``` \ndocker exec gitlab grep 'Password:' /etc/gitlab/initial_root_password\n\n```\n\n3. Log in as root with the retrieved password.\n\n## Putting it all together\n\nTo demonstrate the power of this integrated RBAC model, start by walking through a real-world user journey — from identity to access. \n\nBegin in Keycloak by showcasing a user assigned to specific realm roles (e.g., developer, maintainer) and groups (e.g., /engineering/platform). These roles have been mapped to GitLab access levels via OIDC claims, while group affiliations align with GitLab’s structured hierarchy of root groups, sub-groups, and projects. \n\nUpon login through GitLab’s SSO Keycloak endpoint, the user is automatically provisioned into the correct group and assigned the appropriate role — with no manual intervention. \n\nWithin GitLab, you can see that the user can interact with the assigned project: For example, a developer might push code and open a merge request, but not merge to protected branches — validating the least-privilege model. \n\nFinally, you can showcase access across multiple teams or products that are managed centrally in Keycloak, yet enforced precisely in GitLab through group sync and permissions inheritance. 
This demo illustrates not just role assignment, but how GitLab and Keycloak together deliver real-time, automated access governance at scale — ready for secure, compliant, enterprise-grade software development.\n\n## Why GitLab?\n\nGitLab’s comprehensive, intelligent DevSecOps platform is the ideal foundation for secure, scalable access management. With native OIDC support, granular role enforcement, SCIM-based user provisioning, and built-in audit logging, GitLab allows organizations to centralize control without compromising agility. Its flexible group hierarchy mirrors enterprise structure, making it easy to manage access across teams. \n\nIntegrating with identity providers like Keycloak automates onboarding, ensures least-privilege access, and creates a seamless identity-to-permission pipeline that supports regulatory and security goals. As a core component of GitLab’s security capabilities, RBAC ties directly into CI/CD, policy enforcement, and vulnerability management workflows.\n\n## Summary\nRBAC is just the beginning. With GitLab and Keycloak, you’re not just securing access — you’re enabling structured, automated governance that scales. As you expand into policy enforcement, Secure SDLC, and DevSecOps automation, this foundation becomes a launchpad for sustainable, enterprise-grade software delivery.\n\n> Get started with RBAC in GitLab today with a free, 60-day trial of GitLab Ultimate. 
[Sign up today](https://about.gitlab.com/free-trial/)!",[767],[1137,1138,1139],"James Wormwell","Paul Meresanu","Kees Valkhof",{"featured":91,"template":788,"slug":1141},"automating-role-based-access-control-rbac-at-scale",{"content":1143,"config":1151},{"title":1144,"description":1145,"authors":1146,"heroImage":963,"date":1148,"body":1149,"category":767,"tags":1150},"Last year we signed the Secure by Design pledge - here's our progress","Learn about GitLab's CISA-aligned additions and improvements around MFA, default password reduction, patching, and vulnerability disclosure.",[1147],"Joseph Longo","2025-06-09","A little over a year ago, GitLab signed [CISA’s Secure by Design Pledge](https://about.gitlab.com/blog/secure-by-design-principles-meet-devsecops-innovation-in-gitlab-17/), a directive for technology providers to embed security at the heart of their products from the outset of development. Since then, we've made significant progress towards improving our security posture and creating a more secure ecosystem for our customers to develop secure software faster.\n\n## Meeting the security goals\n\nLet’s explore the additions and improvements we've made to further enhance security across the development lifecycle.\n\n### Multi-factor authentication (MFA)\n\n***Goal: Within one year of signing the pledge, demonstrate actions taken to measurably increase the use of multi-factor authentication across the manufacturer’s products.***\n\nGitLab currently offers multiple [MFA](https://docs.gitlab.com/ee/user/profile/account/two_factor_authentication.html) options for users to secure their accounts. 
We also offer SSO functionality to enable [GitLab.com](https://docs.gitlab.com/ee/user/group/saml_sso/), [Self-Managed](https://docs.gitlab.com/integration/saml/), and [GitLab Dedicated](https://docs.gitlab.com/integration/saml/) customers to streamline their authentication processes and their internal MFA requirements.\n\nTo further enhance the platform’s resilience, and to create a more secure foundation for our customers, GitLab is executing a phased MFA by Default rollout.\n\nIn the coming months, we will deploy changes requiring all customers to enable MFA on their accounts. \n\nFor customers who already have MFA enabled or authenticate to GitLab via their organization’s single sign-on (SSO) method, there will be no necessary changes. For customers who do not already have MFA enabled and are not authenticating to GitLab via their organization’s SSO method, they will be required to enable MFA and enroll in one or more of the available MFA methods.\n\nThe MFA rollout will occur in stages to ensure a smooth and consistent adoption across all customers. More details on GitLab’s MFA by Default rollout will be shared in the near future.\n\n### Default passwords\n\n***Goal: Within one year of signing the pledge, demonstrate measurable progress towards reducing default passwords across the manufacturers’ products.***\n\nTo reduce the use of default passwords, GitLab uses randomly generated root passwords for its multiple installation methods. 
GitLab’s multi-method [installation instructions](https://docs.gitlab.com/ee/install/install_methods.html) also include guidance on how to change the randomly generated root password for each installation.\n\nFor some install methods, such as installing GitLab in a Docker container, the password file with the initial root password is deleted in the first container restart after 24 hours to help further harden the GitLab instance.\n\n### Reducing entire classes of vulnerabilities\n\n***Goal: Within one year of signing the pledge, demonstrate actions taken towards enabling a significant measurable reduction in the prevalence of one or more vulnerability classes across the manufacturer’s products.***\n\nGitLab has published [secure coding guidelines](https://docs.gitlab.com/ee/development/secure_coding_guidelines.html#sast-coverage) to its documentation site that contains descriptions and guidelines for addressing security vulnerabilities commonly identified in the GitLab codebase. \n\nThe guidelines are “intended to help developers identify potential security vulnerabilities early, with the goal of reducing the number of vulnerabilities released over time.” \n\nGitLab continues to improve its [SAST rule coverage](https://docs.gitlab.com/development/secure_coding_guidelines#sast-coverage) to address broader sets of security vulnerabilities for itself and its customers.\n\n### Security patches\n\n***Goal: Within one year of signing the pledge, demonstrate actions taken to measurably increase the installation of security patches by customers.***\n\nGitLab handles all updates related to its GitLab.com and GitLab Dedicated service offerings. 
Additionally, GitLab publishes a [maintenance policy](https://docs.gitlab.com/ee/policy/maintenance.html), which outlines its approach to releasing updates, backporting, upgrade recommendations and supporting documentation, etc.\n\nGitLab’s documentation has comprehensive guidance on [how to upgrade](https://docs.gitlab.com/ee/update/?tab=Self-compiled+%28source%29#upgrade-based-on-installation-method) self-managed instances based on their deployment model. This includes Omnibus, Helm chart, Docker and self-compiled GitLab installations.\n\nGitLab also provides a detailed [upgrade plan](https://docs.gitlab.com/ee/update/plan_your_upgrade.html) to ensure proper testing and troubleshooting can be performed as well as rollback plans if necessary.\n\nDepending on the version upgrade, specific changes ([example for GitLab 17](https://docs.gitlab.com/ee/update/versions/gitlab_17_changes.html)) for each version are highlighted to ensure a smooth upgrade process and limit unavailability of services.\n\n### Vulnerability disclosure policy\n\n***Goal: Within one year of signing the pledge, publish a vulnerability disclosure policy (VDP).***\n\nGitLab maintains a strong bug bounty program through [HackerOne](https://hackerone.com/gitlab?type=team), a [security.txt](https://gitlab.com/.well-known/security.txt) file highlighting GitLab’s preferred and additional disclosure processes, and [release posts](https://about.gitlab.com/releases/categories/releases/) highlighting security fixes.\n\nCustomers and the general public can subscribe to receive GitLab’s release posts directly in their email inbox.\n\n### Common vulnerability enumerations \n\n***Goal: Within one year of signing the pledge, demonstrate transparency in vulnerability reporting***\n\nGitLab includes the Common Weakness Enumeration (CWE) field in all Common vulnerability enumerations (CVE) records it publishes. 
Over the past year, GitLab has iterated to also include the Common Platform Enumeration (CPE) field in CVE records.\n\nThe GitLab [CVE assignments project](https://gitlab.com/gitlab-org/cves) stores a copy of all CVE identifiers assigned and published by GitLab in its role as a CVE Numbering Authority.\n\n> Check out [GitLab’s CVE submission template](https://gitlab.com/gitlab-org/cves/-/blob/master/.gitlab/issue_templates/Internal%20GitLab%20Submission.md?ref_type=heads).\n\n### Evidence of intrusions\n\n***Goal: Within one year of signing the pledge, demonstrate a measurable increase in the ability for customers to gather evidence of cybersecurity intrusions affecting the manufacturer’s products.***\n\nGitLab has published an [incident response guide](https://docs.gitlab.com/ee/security/responding_to_security_incidents.html) to help customers respond to incidents involving GitLab instances. Additionally, GitLab has open sourced versions of its [GUARD detection-as-code](https://about.gitlab.com/blog/unveiling-the-guard-framework-to-automate-security-detections-at-gitlab/) and TLDR threat detection frameworks. 
The repositories for those open source frameworks can be found on [GitLab’s Open Source Security Center](https://about.gitlab.com/security/open-source-resources/).\n\nIn a similar manner, GitLab is adding functionality to its [GitLab.com](http://gitLab.com) service offering to [detect compromised passwords](https://about.gitlab.com/blog/introducing-compromised-password-detection-for-gitlab-com/) for all logins using GitLab’s native username and password authentication method.\n\n## What's next\n\n[GitLab’s Security Division’s mission](https://gitlab.com/gitlab-com/gl-security) is to enable everyone to innovate and succeed on a safe, secure, and trusted DevSecOps platform.\n\nGitLab's security enhancements over the past year have allowed us to demonstrate our commitment to CISA’s Secure by Design Pledge, and they have strengthened our platform and given customers a more reliable and secure foundation to build on. \n\nOur commitment to iteration means we're already focused on the next set of innovations that will drive us forward.\n\n> To learn more about GitLab’s security enhancements, bookmark our [security page on the GitLab Blog](https://about.gitlab.com/blog/categories/security/).\n\n## Read more  \n- [Secure by Design principles meet DevSecOps innovation in GitLab 17](https://about.gitlab.com/blog/secure-by-design-principles-meet-devsecops-innovation-in-gitlab-17/)\n- [Happy birthday, Secure by Design!](https://about.gitlab.com/blog/happy-birthday-secure-by-design/)\n- [Strengthen your cybersecurity strategy with Secure by Design](https://about.gitlab.com/the-source/security/strengthen-your-cybersecurity-strategy-with-secure-by-design/)",[709,475,767,184],{"slug":1152,"featured":91,"template":788},"last-year-we-signed-the-secure-by-design-pledge-heres-our-progress",{"content":1154,"config":1162},{"title":1155,"description":1156,"authors":1157,"heroImage":1158,"date":1159,"body":1160,"category":767,"tags":1161},"Tutorial: Secure and optimize your Maven 
Repository in GitLab","Learn the best practices, advanced techniques, and upcoming features that improve the efficiency of your DevSecOps workflow.",[872],"https://res.cloudinary.com/about-gitlab-com/image/upload/v1749666187/Blog/Hero%20Images/blog-image-template-1800x945__6_.png","2025-05-22","As a GitLab product manager, I'm excited to share insights on securing and optimizing your Maven repository. We're passionate about providing a complete DevSecOps platform, and the Maven repository is part of this ecosystem. Explore best practices, advanced techniques, and upcoming features that will transform your Maven workflow.\n\n## Securing your Maven repository: A comprehensive approach\n\nSecuring your software supply chain is more critical than ever so let's dive into strategies to fortify your Maven packages in GitLab.\n\n### Implement strong authentication\n\n**Personal access tokens:** Use PATs for fine-grained access control.\n\nFor example:\n\n```bash\nmvn deploy -s settings.xml\n```\n\nWhere `settings.xml` contains:\n\n```xml\n\u003Csettings>\n  \u003Cservers>\n    \u003Cserver>\n      \u003Cid>gitlab-maven\u003C/id>\n      \u003Cconfiguration>\n        \u003ChttpHeaders>\n          \u003Cproperty>\n            \u003Cname>Private-Token\u003C/name>\n            \u003Cvalue>${env.GITLAB_PERSONAL_TOKEN}\u003C/value>\n          \u003C/property>\n        \u003C/httpHeaders>\n      \u003C/configuration>\n    \u003C/server>\n  \u003C/servers>\n\u003C/settings>\n```\n\n**Deploy tokens:** Ideal for CI/CD pipelines. 
Generate these in your GitLab project settings and use them in your `.gitlab-ci.yml`.\n\n```yaml\ndeploy:\n  script:\n    - 'mvn deploy -s ci_settings.xml'\n  variables:\n    MAVEN_CLI_OPTS: \"-s ci_settings.xml --batch-mode\"\n    MAVEN_OPTS: \"-Dmaven.repo.local=.m2/repository\"\n  only:\n    - main\n```\n\nThe corresponding `ci_settings.xml` file:\n\n```xml\n\u003Csettings xmlns=\"http://maven.apache.org/SETTINGS/1.1.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n  xsi:schemaLocation=\"http://maven.apache.org/SETTINGS/1.1.0 http://maven.apache.org/xsd/settings-1.1.0.xsd\">\n  \u003Cservers>\n    \u003Cserver>\n      \u003Cid>gitlab-maven\u003C/id>\n      \u003Cconfiguration>\n        \u003ChttpHeaders>\n          \u003Cproperty>\n            \u003Cname>Deploy-Token\u003C/name>\n            \u003Cvalue>${env.CI_DEPLOY_PASSWORD}\u003C/value>\n          \u003C/property>\n        \u003C/httpHeaders>\n      \u003C/configuration>\n    \u003C/server>\n  \u003C/servers>\n\u003C/settings>\n```\n\nIn this setup:\n\n* The `CI_DEPLOY_PASSWORD` should be set as a CI/CD variable in your GitLab project settings containing the deploy token.\n* The `\u003Cid>` should match the repository ID in your project's `pom.xml` file.\n\n**Token rotation:** Implement a token rotation policy using GitLab's API. 
For example, you could create a scheduled pipeline that rotates tokens monthly:\n\n```yaml\nrotate_tokens:\n  script:\n    - curl --request POST \"https://gitlab.example.com/api/v4/projects/${CI_PROJECT_ID}/deploy_tokens\" --header \"PRIVATE-TOKEN: ${ADMIN_TOKEN}\" --form \"name=maven-deploy-${CI_PIPELINE_ID}\" --form \"scopes[]=read_registry\" --form \"scopes[]=write_registry\"\n  only:\n    - schedules\n```\n\n### Leverage GitLab's built-in security features\n\n**Dependency Scanning:** Enable it in your `.gitlab-ci.yml`.\n\n```yaml\ninclude:\n  - template: Security/Dependency-Scanning.gitlab-ci.yml\n\nvariables:\n  DS_JAVA_VERSION: 11\n```\n\n**Container Scanning:** If you're containerizing your Maven applications.\n\n```yaml\ninclude:\n  - template: Security/Container-Scanning.gitlab-ci.yml\n\nvariables:\n  CS_IMAGE: $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA\n```\n\n**License Compliance:** Ensure all dependencies comply with your project's licensing requirements.\n\n```yaml\ninclude:\n  - template: Security/License-Scanning.gitlab-ci.yml\n```\n\n### Secure your CI/CD pipeline\n\n* **CI/CD variables:** Store sensitive information securely.\n\n  ```yaml\n  variables:\n    MAVEN_REPO_USER: ${CI_DEPLOY_USER}\n    MAVEN_REPO_PASS: ${CI_DEPLOY_PASSWORD}\n  ```\n* **Masked variables:** Prevent exposure in job logs. 
Set these in your GitLab CI/CD settings.\n* **Protected branches and tags:** Configure these in your GitLab project settings to control who can trigger package publishing.\n\n### Implement package signing\n\n* Use the Maven GPG plugin to sign your artifacts.\n\n  ```xml\n  \u003Cplugin>\n    \u003CgroupId>org.apache.maven.plugins\u003C/groupId>\n    \u003CartifactId>maven-gpg-plugin\u003C/artifactId>\n    \u003Cversion>1.6\u003C/version>\n    \u003Cexecutions>\n      \u003Cexecution>\n        \u003Cid>sign-artifacts\u003C/id>\n        \u003Cphase>verify\u003C/phase>\n        \u003Cgoals>\n          \u003Cgoal>sign\u003C/goal>\n        \u003C/goals>\n      \u003C/execution>\n    \u003C/executions>\n  \u003C/plugin>\n  ```\n\n* Store your GPG key securely using GitLab CI/CD variables.\n\n### Control package access\n\n* Use GitLab's project and group-level package registry settings to restrict access.\n* Implement IP allowlists for network-level access control in your GitLab instance settings.\n\n## Optimize performance: Streamline your Maven workflow\n\nEfficiency is crucial when working with large projects or numerous dependencies. 
Here are advanced techniques to optimize your Maven package usage in GitLab.\n\n### Utilize dependency management\n\n* Use the `\u003CdependencyManagement>` section in your parent POM.\n\n  ```xml\n  \u003CdependencyManagement>\n    \u003Cdependencies>\n      \u003Cdependency>\n        \u003CgroupId>org.springframework.boot\u003C/groupId>\n        \u003CartifactId>spring-boot-dependencies\u003C/artifactId>\n        \u003Cversion>${spring-boot.version}\u003C/version>\n        \u003Ctype>pom\u003C/type>\n        \u003Cscope>import\u003C/scope>\n      \u003C/dependency>\n    \u003C/dependencies>\n  \u003C/dependencyManagement>\n  ```\n### Leverage multi-module projects\n\n  * Structure your project with a parent POM and multiple modules:\n\n    ```\n    my-project/\n    ├── pom.xml\n    ├── module1/\n    │   └── pom.xml\n    ├── module2/\n    │   └── pom.xml\n    └── module3/\n        └── pom.xml\n    ```\n  * Use Maven's reactor to build modules in the optimal order:\n\n    ```bash\n    mvn clean install\n    ```\n\n### Implement parallel builds\n\n* Use Maven's parallel build feature:\n\n  ```bash\n  mvn -T 4C clean install\n  ```\n\n### Optimize for CI/CD\n\n* In `.gitlab-ci.yml`, use caching to speed up builds:\n\n  ```yaml\n  cache:\n    paths:\n      - .m2/repository\n\n  build:\n    script:\n      - mvn clean package -Dmaven.repo.local=$CI_PROJECT_DIR/.m2/repository\n  ```\n* Implement incremental builds:\n\n  ```yaml\n  build:\n    script:\n      - mvn clean install -Dmaven.repo.local=$CI_PROJECT_DIR/.m2/repository -am -amd -fae\n  ```\n\n### Utilize build caching\n\n* Use the Gradle Enterprise Maven Extension for build caching:\n\n  ```xml\n  \u003Cbuild>\n    \u003Cplugins>\n      \u003Cplugin>\n        \u003CgroupId>com.gradle\u003C/groupId>\n        \u003CartifactId>gradle-enterprise-maven-plugin\u003C/artifactId>\n        \u003Cversion>1.9\u003C/version>\n        \u003Cconfiguration>\n          \u003CgradleEnterprise>\n            
\u003Cserver>https://ge.example.com\u003C/server>\n            \u003CallowUntrusted>false\u003C/allowUntrusted>\n          \u003C/gradleEnterprise>\n        \u003C/configuration>\n      \u003C/plugin>\n    \u003C/plugins>\n  \u003C/build>\n  ```\n\n## Introducing the Maven Virtual Registry beta program\n\nI'm thrilled to announce the launch of our beta program for the upcoming Maven virtual registry feature. This addition to our package ecosystem will change how you manage Maven repositories in GitLab.\n\n### Key features of Maven Virtual Registry\n\n1. **Repository aggregation:** Combine multiple Maven repositories (both internal and external) into a single virtual repository.\n2. **Smart proxy and caching:** Improve build times by caching artifacts and intelligently routing requests.\n3. **Centralized Access Control:** Enhance security by managing access to all repositories from a single point.\n\n### How it works\n\n1. **Configuration:** Configure Maven authentication in your `settings.xml`:\n\n```\n\u003Csettings>\n  \u003Cservers>\n    \u003Cserver>\n      \u003Cid>gitlab-maven\u003C/id>\n      \u003Cconfiguration>\n        \u003ChttpHeaders>\n          \u003Cproperty>\n            \u003Cname>Private-Token\u003C/name>\n            \u003Cvalue>${env.GITLAB_TOKEN}\u003C/value>\n          \u003C/property>\n        \u003C/httpHeaders>\n      \u003C/configuration>\n    \u003C/server>\n  \u003C/servers>\n\u003C/settings>\n```\n\nAuthentication options:\n\n- Personal access token: Use `Private-Token` as the name and `${env.GITLAB_TOKEN}` as the value.\n\n-  Group deploy token: Use `Deploy-Token` as the name and `${env.GITLAB_DEPLOY_TOKEN}` as the value.\n\n- Group access token: Use `Private-Token` as the name and `${env.GITLAB_ACCESS_TOKEN}` as the value.\n\n- CI job token: Use `Job-Token` as the name and `${CI_JOB_TOKEN}` as the value.\n\n- Configure the virtual registry in your `pom.xml`.\n\nOption 1: As an additional registry:\n\n```\n\u003Crepositories>\n  
\u003Crepository>\n    \u003Cid>gitlab-maven\u003C/id>\n    \u003Curl>https://gitlab.example.com/api/v4/virtual_registries/packages/maven/\u003Cvirtual registry id>\u003C/url>\n  \u003C/repository>\n\u003C/repositories>\n```\n\nOption 2: As a replacement for Maven Central (in your `settings.xml`):\n\n```\n\u003Cmirrors>\n  \u003Cmirror>\n    \u003Cid>gitlab-maven\u003C/id>\n    \u003Cname>GitLab virtual registry for Maven Central\u003C/name>\n    \u003Curl>https://gitlab.example.com/api/v4/virtual_registries/packages/maven/\u003Cvirtual registry id>\u003C/url>\n    \u003CmirrorOf>central\u003C/mirrorOf>\n  \u003C/mirror>\n\u003C/mirrors>\n```\n\n2. **Usage:** Now all your Maven operations will use the virtual repository.\n\n```\n# For personal access tokens\nexport GITLAB_TOKEN=your_personal_access_token\n\n# For group deploy tokens\nexport GITLAB_DEPLOY_TOKEN=your_deploy_token\n\n# For group access tokens\nexport GITLAB_ACCESS_TOKEN=your_access_token\n\n# Then run Maven commands normally\nmvn package\n\n```\n\n3. Benefits\n\n- Simplified dependency management\n- Improved build times\n- Enhanced security and compliance\n- Better control over third-party dependencies\n\n### Join the beta program\n\nWe're actively seeking participants for our beta program. As a beta tester, you'll have the opportunity to:\n\n* Get early access to the Maven Virtual Registry feature.\n* Provide direct feedback to our development team.\n* Shape the future of Maven package management in GitLab.\n* Participate in exclusive webinars and Q&A sessions with our product team.\n\n> To join the beta program or learn more about the Maven Virtual Registry, please visit the [GitLab Maven Virtual Registry Beta Program](https://gitlab.com/gitlab-org/gitlab/-/issues/498139) (**Note:** This is a placeholder link).\n\n## Summary\n\nAt GitLab, we're committed to providing cutting-edge tools for secure, efficient, and scalable software development. 
The Maven Virtual Registry is just one example of how we're continuously innovating to meet the evolving needs of developers and platform engineers.\n\nImplementing the security measures and optimization techniques discussed in this post and leveraging upcoming features like the Maven Virtual Registry can improve your Maven workflow within GitLab.\n\nWe're excited about the future of package management in GitLab and can't wait to see how you'll use these features to take your development process to the next level. Stay tuned for more updates and happy coding!",[767,475,785,757,916],{"slug":1163,"featured":91,"template":788},"tutorial-secure-and-optimize-your-maven-repository-in-gitlab",{"content":1165,"config":1168},{"title":1102,"description":1103,"body":1104,"category":757,"tags":1166,"authors":1167,"heroImage":1109,"date":1110},[282,757,109,709],[1107,1108],{"featured":91,"template":788,"slug":1112},[1170,1175,1180],{"content":1171,"config":1174},{"title":1130,"description":1131,"heroImage":1132,"date":1133,"body":1134,"category":767,"tags":1172,"authors":1173},[767],[1137,1138,1139],{"featured":91,"template":788,"slug":1141},{"content":1176,"config":1179},{"heroImage":1115,"title":1116,"description":1117,"authors":1177,"date":1119,"body":1120,"category":757,"tags":1178},[872],[1122,757],{"featured":91,"template":788,"slug":1124,"externalUrl":1125},{"content":1181,"config":1184},{"title":1053,"description":1054,"authors":1182,"heroImage":1057,"body":1058,"date":1059,"category":747,"tags":1183},[1056],[984,903,267],{"featured":91,"template":788,"slug":1062},[1186,1200,1212],{"content":1187,"config":1197},{"title":1188,"description":1189,"authors":1190,"heroImage":1192,"date":1193,"body":1194,"category":90,"tags":1195},"GitLab Patch Release: 18.0.2, 17.11.4, 17.10.8","Learn more about this patch release for GitLab Community Edition (CE) and Enterprise Edition (EE).",[1191],"Costel 
Maxim","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749661926/Blog/Hero%20Images/security-patch-blog-image-r2-0506-700x400-fy25_2x.jpg","2025-06-11","This is the [patch release for GitLab Community Edition (CE) and Enterprise Edition (EE)](https://about.gitlab.com/releases/2025/06/11/patch-release-gitlab-18-0-2-released/).",[757,1196],"patch releases",{"featured":6,"template":788,"externalUrl":1198,"slug":1199},"https://about.gitlab.com/releases/2025/06/11/patch-release-gitlab-18-0-2-released/","gitlab-patch-release-18-0-2-17-11-4-17-10-8",{"content":1201,"config":1210},{"title":1202,"description":1203,"authors":1204,"heroImage":1207,"date":822,"body":1208,"category":757,"tags":1209},"AI-native GitLab Premium: Transform higher education software development","The DevSecOps platform's enterprise-grade features for academic workflows, data protection, and support ensure better collaboration, security, and efficiency.",[1205,1206],"Jessica Hurwitz","Elisabeth Burrows","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659537/Blog/Hero%20Images/display-article-image-0679-1800x945-fy26.png","Educational institutions increasingly rely on modern software development practices to support teaching, research, and administrative functions. As development needs grow more complex in university and college environments, GitLab Premium with Duo provides essential capabilities that address the unique challenges faced by higher education – particularly around open source development, remote collaboration, and enterprise-grade security.\n\nGitLab's comprehensive, intelligent DevSecOps platform delivers value that extends far beyond fundamental version control. 
Built on an open source foundation with enterprise-grade features, GitLab Premium helps prevent costly security incidents involving student data, provides cloud-based development environments for distributed teams, and offers the professional support that educational institutions need for mission-critical systems. And now [Premium includes GitLab Duo AI essentials](https://about.gitlab.com/blog/gitlab-premium-with-duo/) Code Suggestions and Chat at no additional cost.\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1083723619?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"GitLab Premium with Duo Core\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n## The unique development environment in higher education\n\nUniversities and colleges operate in a distinctly challenging technical environment. Development teams must support multidisciplinary collaboration across technical and non-technical departments while managing vast amounts of sensitive data – from student records and financial information to research findings and faculty evaluations.\n\nMost institutions face these challenges with limited IT resources, yet must support thousands of concurrent users across numerous projects and research initiatives. 
Research integrity requirements add another layer of complexity, as development work often needs to maintain traceability and reproducibility standards.\n\n## Premium solutions for educational institutions\n\nGitLab Premium with Duo has the functionality that higher education needs.\n\n### Enhanced collaboration and workflow capabilities\n\nCross-departmental projects are common in educational settings – from multi-department research initiatives to custom module development for systems like Ellucian Banner, an enterprise resource planning application used by higher education. These complex projects require sophisticated workflow management that goes beyond basic version control. \n\nGitLab Premium addresses these challenges with powerful collaboration and project visualization features, including epics, roadmaps, and advanced Kanban boards for Agile development workflows. When you assign multiple approvers to certain merge requests and protected branches, you ensure higher code quality and accountability across teams. These tools allow institutions to coordinate work across departments while aligning with institution-wide objectives – essential for managing multiphase campus technology initiatives.\n\nIn Australia, [Deakin University’s](https://about.gitlab.com/customers/deakin-university/) enablement team uses GitLab to build standardized processes and reusable templates — such as custom merge request templates, templated build pipelines, and a security and compliance framework — that can be shared with the broader university community and citizen developers, driving innovation and collaboration both inside the university and with key partners. 
“We were trying to bring in a community of practice and help it thrive for quite some time, but we were never successful until we had this tool,” said Aaron Whitehand, director of Digital Enablement at Deakin University.\n\n> #### Read more about [how Deakin University uses GitLab to drive improvements](https://about.gitlab.com/customers/deakin-university/) in collaboration and productivity, including a 60% reduction in manual tasks.\n\n### Advanced data protection and governance\n\nEducational institutions generate and manage vast amounts of data, ranging from student records and financial information to research findings and faculty evaluations. The security stakes are particularly high. The [2023 MOVEit breach](https://universitybusiness.com/in-just-3-months-this-data-breach-has-compromised-nearly-900-institutions/), which spanned three months and compromised approximately 900 educational institutions, exposed the sensitive information of more than 62 million people. This demonstrates the critical need for proactive security measures integrated directly into higher education development workflows. \n\nVulnerability scanning stops code releases that contain security risks, enabling institutions to establish and enforce governance protocols that protect sensitive information. These capabilities help universities implement proper access controls and permission structures for research databases, creating a secure framework where authorized researchers maintain appropriate access – effectively balancing robust protection with necessary collaboration.\n\nGitLab is built from the ground up to secure your source code. Scalable Git-based repositories, granular access controls, and built-in compliance features eliminate bottlenecks in your workflow while meeting security requirements. GitLab Premium provides audit tracking and compliance capabilities essential for educational environments. 
Complete audit trails capture detailed logs of all code changes, access attempts, and system modifications with timestamps and user attribution. Full change management documentation ensures traceability of who made what changes, when, and why – critical for research integrity – while access control auditing monitors repository access and permissions changes. \n\n### Cloud-based development environments and remote collaboration\n\nModern educational institutions require flexible development environments that support distributed teams, remote learning scenarios, and diverse technical requirements. GitLab Premium provides:\n\n* **[GitLab Workspaces](https://docs.gitlab.com/user/workspace/):** Cloud-based development environments accessible from any device  \n* **[Web IDE integration](https://docs.gitlab.com/user/project/web_ide/):** Browser-based coding with full GitLab feature integration  \n* **[Container-based development](https://about.gitlab.com/blog/build-and-run-containers-in-remote-development-workspaces/):** Consistent, reproducible development environments across different projects and user groups\n\nThese capabilities are particularly valuable for supporting remote and hybrid learning models, enabling students and researchers to access standardized development environments regardless of their physical location or local hardware constraints.\n\n### Professional support for critical systems\n\nSmall IT teams in educational settings often support large, complex infrastructure with minimal resources. Reaching out to user forums for answers doesn't always mean you'll get an accurate reply and isn't efficient for large teams. 
GitLab Premium includes dedicated professional support, providing faster issue resolution and upgrade assistance during critical periods like class enrollment or research deadlines.\n\nThis minimizes downtime for critical services and ensures continuity of operations during peak usage periods, giving stretched IT departments the enterprise-grade reliability they need for essential academic systems.\n\n### Built on open source with enterprise capabilities\n\nOpen source software is developed collaboratively in a public manner, with source code freely available for anyone to view, modify, and distribute. This development model fosters innovation through community contributions and ensures transparency in how software functions. GitLab's open source foundation resonates strongly with educational institutions' values around collaboration, transparency, and community contribution. GitLab Premium features extend this foundation with enterprise-grade capabilities while maintaining the ability to contribute back to the open source ecosystem.\n\nKey open source advantages include:\n\n* **Transparency:** Complete visibility into platform capabilities and security measures – you can examine exactly how the software works  \n* **Community contribution:** Ability to contribute improvements back to the broader community and benefit from global developer expertise  \n* **Vendor independence:** Reduced lock-in risk with open source alternatives and the freedom to modify code as needed  \n* **Co-creation opportunities:** Collaborative development with the broader community, including other educational institutions, to build shared solutions\n\n### AI assistant for software development tasks\n\nGitLab Premium with [Duo](https://about.gitlab.com/gitlab-duo/) brings powerful AI-native capabilities directly into the development workflow, including:  \n* [**Code Suggestions**](https://docs.gitlab.com/user/project/repository/code_suggestions/), which provides real-time code completion 
and suggestions, helping developers write code faster and more efficiently  \n* [**Chat**](https://docs.gitlab.com/user/gitlab_duo_chat/), which allows team members to get instant answers to questions, troubleshoot issues, and access documentation directly within the GitLab environment\n\nThese AI tools significantly enhance productivity, reduce errors, and streamline collaboration, making GitLab Premium an even more valuable asset for software development teams in higher education.\n\n### Transparency at the core\n\nHigher education institutions handle incredibly sensitive data — from student records and research findings to proprietary academic work and federal grant information. \n\nThe [GitLab AI Transparency Center ](https://about.gitlab.com/ai-transparency-center/)demonstrates our commitment to transparency, accountability, and protection of customer data and intellectual property, providing the privacy guarantees that educational institutions require.\n\nGitLab launched the AI Transparency Center to help customers, community, and team members better understand how GitLab upholds ethics and transparency in our AI-powered features. \n\nOur publicly available documentation highlights the comprehensive measures we take to protect your institution's data and intellectual property. [GitLab's AI Ethics Principles for Product Development](https://handbook.gitlab.com/handbook/legal/ethics-compliance-program/ai-ethics-principles/) guide us as we continue to build and evolve our AI functionality, helping higher education organizations harness the promise of AI while maintaining complete control and oversight of their most valuable information assets.\n\n## Get started with GitLab Premium today\n\nFor educational institutions, GitLab Premium with Duo represents a strategic technical investment that combines the benefits of open source development with enterprise-grade, AI-native capabilities. 
By providing professional-grade tools ready for the challenges familiar to the complex technical environment of higher education, GitLab Premium with Duo helps institutions address security vulnerabilities, streamline development workflows, and maintain the reliable infrastructure that academic and research operations depend on.\n\n> [Learn more about GitLab for Public Sector](https://about.gitlab.com/solutions/public-sector/) or  [speak to our sales team today](https://about.gitlab.com/sales/).\n\n## Read more\n\n- [Unlocking AI for every GitLab Premium and Ultimate customer](https://about.gitlab.com/blog/gitlab-premium-with-duo/)\n- [GitLab Duo Code Suggestions](https://docs.gitlab.com/user/project/repository/code_suggestions/)\n- [GitLab Duo Chat](https://docs.gitlab.com/user/gitlab_duo_chat/)",[546,475,757,785,184],{"slug":1211,"featured":91,"template":788},"ai-native-gitlab-premium-transform-higher-education-software-development",{"content":1213,"config":1216},{"title":817,"description":818,"authors":1214,"heroImage":821,"date":822,"body":823,"category":679,"tags":1215},[820],[757,785,475,825,826,827],{"slug":829,"featured":91,"template":788},1751484613874]