diff --git a/.github/scripts/generate_index.py b/.github/scripts/generate_index.py
new file mode 100644
index 000000000..48fdd8a56
--- /dev/null
+++ b/.github/scripts/generate_index.py
@@ -0,0 +1,170 @@
+import os
+import sys
+import re
+import itertools
+import requests
+import hashlib
+
+from urllib.parse import quote
+from pathlib import Path
+from github import Github
+from typing import Dict, List, Optional, Set
+
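+# Builds a PEP 503 "simple" package index from this repository's GitHub release assets.
+# Expected environment (as wired in .github/workflows/package.yml):
+#   GITHUB_TOKEN      - token able to read releases and download assets
+#   GITHUB_REPOSITORY - "<owner>/<repo>" of the repository to index
+#   OUTPUT_DIR        - output directory for the generated index (default: dist)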
+# Define yanked versions - modify this dictionary as needed
+yanked_versions = {
+    "confluent-kafka": {
+        "2.11.0+gr",
+        "2.11.0+gr.1",
+    },
+}
+
+HTML_TEMPLATE = """
+ 
+ 
+     {package_name}
+ 
+ 
+     {package_name}
+     {package_links}
+ 
+ 
+"""
+
+def normalize(name):
+    """Normalize package name according to PEP 503."""
+    return re.sub(r"[-_.]+", "-", name).lower()
+
+def calculate_sha256(file_path):
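+    """Return the SHA-256 hex digest of file_path (hashlib.file_digest requires Python 3.11+)."""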
+    with open(file_path, "rb") as f:
+        digest = hashlib.file_digest(f, "sha256")
+
+    return digest.hexdigest()
+
+def extract_version_from_filename(filename: str) -> str:
+    """Extract version from wheel or sdist filename."""
+    # Remove extension
+    name = filename.replace('.tar.gz', '').replace('.whl', '')
+    
+    # For wheels: package-version-python-abi-platform
+    # For sdist: package-version
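+    # NOTE: assumes the distribution-name component contains no '-', which holds for
+    # wheel filenames (PEP 427 normalizes '-' to '_') and PEP 625-style sdists.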
+    parts = name.split('-')
+    if len(parts) >= 2:
+        return parts[1]
+    return ""
+
+class PackageIndexBuilder:
+    def __init__(self, token: str, repo_name: str, output_dir: str, yanked_versions: Optional[Dict[str, Set[str]]] = None):
+        self.github = Github(token)
+        self.repo = self.github.get_repo(repo_name)
+        self.output_dir = Path(output_dir)
+        self.packages: Dict[str, List[Dict]] = {}
+        self.yanked_versions = yanked_versions or {}
+        
+        # Set up authenticated session
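+        # asset.url points at the Releases API; the octet-stream Accept header makes it return the raw file.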
+        self.session = requests.Session()
+        self.session.headers.update({
+            "Authorization": f"token {token}",
+            "Accept": "application/octet-stream",
+        })
+
+    def is_version_yanked(self, package_name: str, version: str) -> bool:
+        """Check if a specific version of a package is yanked."""
+        normalized_package = normalize(package_name)
+        return normalized_package in self.yanked_versions and version in self.yanked_versions[normalized_package]
+
+    def collect_packages(self):
+        print("Query release assets")
+        
+        for release in self.repo.get_releases():
+            for asset in release.get_assets():
+                if asset.name.endswith(('.whl', '.tar.gz')):
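+                    # The distribution name is the first dash-separated component of the asset filename.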
+                    package_name = normalize(asset.name.split('-')[0])
+                    if package_name not in self.packages:
+                        self.packages[package_name] = []
+
+                    version = extract_version_from_filename(asset.name)
+                    self.packages[package_name].append({
+                        'filename': asset.name,
+                        'url': asset.url,
+                        'size': asset.size,
+                        'upload_time': asset.created_at.strftime('%Y-%m-%d %H:%M:%S'),
+                        'version': version,
+                    })
+
+    def generate_index_html(self):
+        # Generate main index
+        package_list = sorted(self.packages)
+        main_index = HTML_TEMPLATE.format(
+            package_name="Simple Package Index",
+            package_links="\n".join([f'<a href="{x}/">{x}</a><br/>' for x in package_list])
+        )
+
+        with open(self.output_dir / "index.html", "w") as f:
+            f.write(main_index)
+ 
+        for package, assets in self.packages.items():
+
+            package_dir = self.output_dir / package
+            package_dir.mkdir(exist_ok=True)
+
+            # Generate package-specific index.html
+            file_links = []
+            assets = sorted(assets, key=lambda x: x["filename"])
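+            # Group by filename so an asset attached to more than one release is listed and downloaded only once.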
+            for filename, items in itertools.groupby(assets, key=lambda x: x["filename"]):
+                asset_info = next(items)
+                url = asset_info['url']
+                version = asset_info['version']
+
+                # Download the file
+                print(f"Downloading '{filename}' from '{url}'")
+                response = self.session.get(url, stream=True)
+                response.raise_for_status()
+                with open(package_dir / filename, 'wb') as f:
+                    for chunk in response.iter_content(chunk_size=8192):
+                        if chunk:
+                            f.write(chunk)
+
+                sha256_hash = calculate_sha256(package_dir / filename)
+
+                # Check if this version is yanked
+                yanked_attr = ""
+                if self.is_version_yanked(package, version):
+                    yanked_attr = ' data-yanked="true"'
+
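+                # Emit a PEP 503 file link with a sha256 fragment; PEP 592's data-yanked attribute marks yanked files.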
+                file_links.append(
+                    f'<a href="{quote(filename)}#sha256={sha256_hash}"{yanked_attr}>{filename}</a><br/>'
+                )
+
+            package_index = HTML_TEMPLATE.format(
+                package_name=f"Links for {package}",
+                package_links="\n".join(file_links)
+            )
+
+            with open(package_dir / "index.html", "w") as f:
+                f.write(package_index)
+
+    def build(self):
+        # Create output directory
+        self.output_dir.mkdir(parents=True, exist_ok=True)
+
+        # Collect and generate
+        self.collect_packages()
+        self.generate_index_html()
+
+
+def main():
+    # Get environment variables
+    token = os.environ.get("GITHUB_TOKEN")
+    repo = os.environ.get("GITHUB_REPOSITORY")
+    print(f"Repository: {repo}")
+    output_dir = os.environ.get("OUTPUT_DIR", "dist")
+    
+    if not all([token, repo]):
+        print ("Missing required environment variables")
+        sys.exit(1)
+
+    builder = PackageIndexBuilder(token, repo, output_dir, yanked_versions)
+    builder.build()
+
+if __name__ == "__main__":
+    main()
diff --git a/.github/workflows/package.yml b/.github/workflows/package.yml
new file mode 100644
index 000000000..f0506b668
--- /dev/null
+++ b/.github/workflows/package.yml
@@ -0,0 +1,128 @@
+# .github/workflows/package.yml
+name: Build and Package Wheels
+
+on:
+  pull_request:
+  push:
+
+env:
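+  # Stripped of its leading "v" and passed to the tools/wheels scripts to select the librdkafka.redist package version.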
+  LIBRDKAFKA_VERSION: v2.11.1.1-RC1
+
+jobs:
+
+  build-linux:
+    name: Build wheels for Linux ${{ matrix.arch }}
+    strategy:
+      matrix:
+        include:
+          - arch: x64
+            runner: ubuntu-24.04
+          - arch: arm64
+            runner: ubuntu-24.04-arm
+    runs-on: ${{ matrix.runner }}
+    env:
+      OS_NAME: linux
+      ARCH: ${{ matrix.arch }}
+      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+    steps:
+      - uses: actions/checkout@v4
+      - name: Build wheels
+        run: |
+          ./tools/wheels/build-wheels.sh "${LIBRDKAFKA_VERSION#v}" wheelhouse
+      - uses: actions/upload-artifact@v4
+        with:
+          name: wheels-${{ env.OS_NAME }}-${{ env.ARCH }}
+          path: wheelhouse/confluent_kafka*.whl
+
+  build-windows:
+    name: Build wheels for Windows
+    runs-on: windows-latest
+    env:
+      OS_NAME: windows
+      ARCH: x64
+      CHERE_INVOKING: yes
+      MSYSTEM: UCRT64
+      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+    steps:
+      - uses: actions/checkout@v4
+      - name: Setup MSYS2
+        uses: msys2/setup-msys2@v2
+      - name: Build wheels
+        shell: bash
+        run: |
+          ./tools/mingw-w64/msys2-dependencies.sh
+          bash tools/mingw-w64/semaphore_commands.sh
+          bash tools/wheels/install-librdkafka.sh ${LIBRDKAFKA_VERSION#v} dest
+          tools/wheels/build-wheels.bat x64 win_amd64 dest wheelhouse
+      - uses: actions/upload-artifact@v4
+        with:
+          name: wheels-${{ env.OS_NAME }}-${{ env.ARCH }}
+          path: wheelhouse/confluent_kafka*.whl
+
+  create_release_artifacts:
+    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
+    name: Create release artifacts
+    needs: [build-linux, build-windows]
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: astral-sh/setup-uv@v6
+      - name: Check version consistency
+        run: |
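+          # GITHUB_REF has the form refs/tags/v<version>; strip the prefix to compare against pyproject.toml.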
+          tag_version=${GITHUB_REF/refs\/tags\/v/}
+          proj_version=$(uv run --no-project --with poetry poetry version -s)
+          if [ "$tag_version" != "$proj_version" ]; then
+            echo "::error title=Version mismatch::Tag version \"$tag_version\" does not match project version \"$proj_version\"."
+            exit 1
+          fi
+      - uses: actions/download-artifact@v4
+        with:
+          path: artifacts
+          pattern: wheels-*
+          merge-multiple: true
+
+      - name: Create release
+        uses: softprops/action-gh-release@v2
+        with:
+          files: |
+            artifacts/confluent_kafka*
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+  publish_simple_package_index:
+    name: Build&publish a PyPI-compatible package index
+    runs-on: ubuntu-latest
+    needs: [create_release_artifacts]
+    if: always() && !failure() && !cancelled() && (github.event_name == 'push')
+    concurrency:
+      group: simple_package_index
+      cancel-in-progress: true
+    permissions:
+      contents: write
+      actions: read
+      packages: read
+      pages: write
+      id-token: write
+    steps:
+      - uses: actions/checkout@v4
+      - name: Generate Package Index
+        run: |
+          python -m pip install --upgrade pip
+          pip install PyGithub requests
+          python .github/scripts/generate_index.py
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          OUTPUT_DIR: dist
+
+      - name: Upload Site Artifact
+        uses: actions/upload-pages-artifact@v3
+        with:
+          path: 'dist'
+
+      - name: Deploy to GitHub Pages
+        id: deployment
+        uses: actions/deploy-pages@v4
+
+      - name: Display GitHub Pages URL
+        run: |
+          echo "Package Index URL: ${{ steps.deployment.outputs.page_url }}"
diff --git a/pyproject.toml b/pyproject.toml
index 6c2f0c1bb..77b268ba5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "confluent-kafka"
-version = "2.11.1"
+version = "2.11.1.1rc1+gr"
 description = "Confluent's Python client for Apache Kafka"
 classifiers = [
     "Development Status :: 5 - Production/Stable",
diff --git a/tools/wheels/install-librdkafka.sh b/tools/wheels/install-librdkafka.sh
index 3a772a2ae..cbe2aecc5 100755
--- a/tools/wheels/install-librdkafka.sh
+++ b/tools/wheels/install-librdkafka.sh
@@ -19,7 +19,19 @@ echo "$0: Installing librdkafka $VER to $DEST"
 [[ -d "$DEST" ]] || mkdir -p "$DEST"
 pushd "$DEST"
 
-curl -L -o lrk$VER.zip https://www.nuget.org/api/v2/package/librdkafka.redist/$VER
+# The GitHub Packages NuGet feed requires an authenticated request, so GITHUB_TOKEN must be set.
+if [ -z "${GITHUB_TOKEN}" ]; then
+    echo "Error: GITHUB_TOKEN is not set"
+    exit 1
+fi
+
+curl -H "Authorization: Bearer ${GITHUB_TOKEN}" \
+ -H "Accept: application/vnd.github.v3+json" \
+ -L \
+ -o lrk$VER.zip \
+https://nuget.pkg.github.com/G-Research/download/librdkafka.redist/$VER/librdkafka.redist.$VER.nupkg
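+# A .nupkg is an ordinary zip archive, so the unzip step below works unchanged.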
+
+#curl -L -o lrk$VER.zip https://www.nuget.org/api/v2/package/librdkafka.redist/$VER
 
 unzip lrk$VER.zip
 
@@ -28,12 +40,7 @@ ARCH=${ARCH:-x64}
 if [[ $OSTYPE == linux* ]]; then
     # Linux
 
-    # Copy the librdkafka build with least dependencies to librdkafka.so.1
-    if [[ $ARCH == arm64* ]]; then
-        cp -v runtimes/linux-$ARCH/native/{librdkafka.so,librdkafka.so.1}
-    else
-        cp -v runtimes/linux-$ARCH/native/{centos8-librdkafka.so,librdkafka.so.1}
-    fi
+    cp -v runtimes/linux-$ARCH/native/{librdkafka.so,librdkafka.so.1}
     ldd runtimes/linux-$ARCH/native/librdkafka.so.1
 
 elif [[ $OSTYPE == darwin* ]]; then