Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Change Github Action's triggers #24222

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open

Change Github Action's triggers #24222

wants to merge 1 commit into from

Conversation

snnn
Copy link
Member

@snnn snnn commented Mar 28, 2025

  1. Change Github Action's triggers
  2. Auto format the yaml files with ruamel.yaml

Because of this change, the pipelines' triggers are set as follows:

on:
  push:
    branches: [ main, 'rel-*']
  pull_request:
    branches: [ main, 'rel-*']
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

I set "cancel-in-progress: true" because, for pipeline runs triggered by pull requests, if the pull request is updated (a new commit is added to it), the old pipeline runs can be cancelled.

However, this setting doesn't work well for the runs triggered by "push" events on the main branch. Let's say we merged a PR, which triggered this pipeline. Then, before the pipeline finished, we merged another PR: the old pipeline run would be cancelled. I don't want this, because each commit in the main branch should be verified.

The files changes were generated by the following python script:

import sys
from pathlib import Path

from ruamel.yaml import YAML, YAMLError
from ruamel.yaml.scalarstring import LiteralScalarString

# Directory that holds all GitHub Actions workflow definitions.
WORKFLOWS_DIR = Path(".github/workflows")
# The desired concurrency group value: pull-request runs are grouped by ref
# (so a new push to the PR cancels the stale run), while push-triggered runs
# are grouped by commit SHA (so every merged commit keeps its own run).
NEW_GROUP_VALUE = "${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.ref || github.sha }}"

def update_workflow_file(filepath: Path, yaml: YAML):
    """
    Loads a workflow file, updates the concurrency group if necessary,
    and writes it back.

    Args:
        filepath: Path of the workflow YAML file to process.
        yaml: A configured ruamel ``YAML`` instance (round-trip mode).

    Returns:
        True if the file was updated, False if it was skipped or needed
        no changes, and None if an error occurred — so the caller can
        count failures separately from no-ops.
    """
    print(f"--- Processing: {filepath.name} ---")
    try:
        with open(filepath, 'r', encoding='utf-8') as infile:
            # Round-trip load: ruamel preserves comments, quotes and ordering.
            data = yaml.load(infile)

        if not isinstance(data, dict):
            print("   Skipping: File content is not a dictionary.")
            return False

        # Only touch files that already declare a concurrency mapping.
        if 'concurrency' in data and isinstance(data.get('concurrency'), dict):
            concurrency_block = data['concurrency']
            current_group = concurrency_block.get('group')
            current_cancel = concurrency_block.get('cancel-in-progress')

            needs_update = False
            if current_group != NEW_GROUP_VALUE:
                print(f"   Updating 'group' from: {current_group}")
                concurrency_block['group'] = NEW_GROUP_VALUE
                needs_update = True
            else:
                print("   'group' is already up-to-date.")

            # Ensure cancel-in-progress is true if we are managing this block
            if not current_cancel:  # Checks for None or False
                print("   Setting 'cancel-in-progress: true'")
                concurrency_block['cancel-in-progress'] = True
                needs_update = True
            elif current_cancel is not True:  # Truthy, but not the bool True
                print(f"   Updating 'cancel-in-progress' from {current_cancel} to true")
                concurrency_block['cancel-in-progress'] = True
                needs_update = True
            else:
                print("   'cancel-in-progress' is already true.")

            if needs_update:
                # Write the updated data back to the file
                try:
                    with open(filepath, 'w', encoding='utf-8') as outfile:
                        yaml.dump(data, outfile)
                    print(f"   Successfully updated: {filepath.name}")
                    return True
                except IOError as e:
                    print(f"   ERROR: Could not write to file {filepath.name}: {e}", file=sys.stderr)
                    return None
            else:
                print(f"   No changes needed for: {filepath.name}")
                return False

        else:
            print("   Skipping: No 'concurrency' block found or it's not a dictionary.")
            return False

    # BUG FIX: the original caught `yaml.YAMLError`, but `yaml` here is a
    # ruamel `YAML` *instance* (the parameter), which has no YAMLError
    # attribute — a parse error would have raised AttributeError while
    # resolving the except clause. Catch the real exception type instead.
    except YAMLError as e:
        print(f"   ERROR: Could not parse YAML in {filepath.name}: {e}", file=sys.stderr)
        return None
    except IOError as e:
        print(f"   ERROR: Could not read file {filepath.name}: {e}", file=sys.stderr)
        return None
    except Exception as e:
        print(f"   ERROR: An unexpected error occurred processing {filepath.name}: {e}", file=sys.stderr)
        return None


def main():
    """Locate all workflow files under WORKFLOWS_DIR and update each one."""
    if not WORKFLOWS_DIR.is_dir():
        print(f"ERROR: Directory not found: {WORKFLOWS_DIR}", file=sys.stderr)
        sys.exit(1)

    # Round-trip YAML handler that keeps existing quoting; indentation is
    # 2-space mappings, 4-space sequences with a 2-space dash offset,
    # matching the common GitHub Actions layout.
    yaml = YAML()
    yaml.preserve_quotes = True
    yaml.indent(mapping=2, sequence=4, offset=2)

    num_updated = 0
    num_skipped = 0
    num_errors = 0

    # Gather both extensions, .yaml files first, then .yml.
    candidates = [
        path
        for pattern in ("*.yaml", "*.yml")
        for path in WORKFLOWS_DIR.glob(pattern)
    ]

    if not candidates:
        print(f"No workflow files (.yaml/.yml) found in {WORKFLOWS_DIR}")
        sys.exit(0)

    print(f"Found {len(candidates)} potential workflow files in {WORKFLOWS_DIR}...")

    for path in candidates:
        if not path.is_file():
            # glob should never yield these, but guard anyway.
            print(f"Skipping non-file item: {path.name}")
            continue
        outcome = update_workflow_file(path, yaml)
        if outcome is True:
            num_updated += 1
        elif outcome is False:
            num_skipped += 1
        else:
            # Anything other than True/False counts as a failure.
            num_errors += 1

    print("\n--- Summary ---")
    print(f"Files updated: {num_updated}")
    print(f"Files skipped/no changes: {num_skipped}")
    if num_errors:
        print(f"Files with errors: {num_errors}", file=sys.stderr)
        sys.exit(1)  # Signal failure if any file could not be processed

if __name__ == "__main__":
    main()

@snnn snnn changed the title Change triggers Change Github Action's triggers Mar 28, 2025
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Projects
None yet
Development

Successfully merging this pull request may close these issues.

1 participant