initial commit; duplicating the c90 project

This commit is contained in:
Greg Gauthier 2026-02-17 21:32:14 +00:00
commit b770b1c174
10 changed files with 864 additions and 0 deletions

View File

@ -0,0 +1,109 @@
name: MVS Delete Members

# When source files are removed from the repo, scrub the matching
# members from the mainframe PDS (the delete side of the MVS pipeline).
on:
  push:
    branches: [ master ]
    paths:
      - 'src/**'
      - 'jcl/**'
  workflow_dispatch:  # Allow manual trigger for cleanup

jobs:
  delete-members:
    runs-on: ubuntu-gitea
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Full history for git diff

      # Gate: only continue when the pushed commit actually deleted .c/.bas files.
      - name: Check if workflow should run
        id: check
        run: |
          echo "Checking if there are deleted source files..."
          if git rev-parse --verify HEAD~1 >/dev/null 2>&1; then
            DELETED_FILES=$(git diff --name-only --diff-filter=D HEAD~1 2>/dev/null | grep -E '\.(c|bas)$' || true)
            if [ -z "$DELETED_FILES" ]; then
              echo "No deleted source files found. Skipping workflow."
              echo "should_run=false" >> $GITHUB_OUTPUT
              exit 0
            fi
            echo "Found deleted files: $DELETED_FILES"
            echo "should_run=true" >> $GITHUB_OUTPUT
          else
            echo "No parent commit, skipping workflow"
            echo "should_run=false" >> $GITHUB_OUTPUT
          fi

      - name: Prepare environment
        if: steps.check.outputs.should_run == 'true'
        id: setup
        run: |
          echo "=== Debug: Starting setup ==="
          # Use apt-get consistently; apt warns about an unstable CLI in scripts.
          apt-get update && apt-get install -y netcat-traditional python3-requests
          nc -h
          echo "=== Debug: Setup complete ==="

      - name: Find deleted source files (deleted only)
        if: steps.check.outputs.should_run == 'true'
        id: deleted
        run: |
          echo "=== Debug: Starting deletion detection ==="
          echo "Current dir: $(pwd)"
          # Check if we have a parent commit
          if git rev-parse --verify HEAD~1 >/dev/null 2>&1; then
            echo "Parent commit exists; checking for deletions."
            # Use --diff-filter=D to only get Deleted files (exclude Added/Modified).
            # '|| true' keeps the step alive under the default -e/pipefail shell
            # when grep matches nothing (the check step used it; this one did not).
            DELETED_FILES=$(git diff --name-only --diff-filter=D HEAD~1 2>/dev/null | grep -E '\.(c|bas)$' || true)
            echo "Deleted files from last commit: '${DELETED_FILES}'"
          else
            echo "No parent commit; no deletions to process."
            DELETED_FILES=""
          fi
          if [ -z "$DELETED_FILES" ]; then
            echo "No deleted C/BAS files found; skipping workflow."
            echo "has_deletions=false" >> $GITHUB_OUTPUT
            exit 0
          fi
          # Process deleted files - convert to space-separated list of members
          DELETED_MEMBERS=""
          for DFILE in $DELETED_FILES; do
            DEXT="${DFILE##*.}"
            DBASE=$(basename "$DFILE" ".$DEXT")
            DELETED_MEMBERS="$DELETED_MEMBERS $DBASE"
          done
          echo "Deleted members: $DELETED_MEMBERS"
          echo "deleted_members=$DELETED_MEMBERS" >> $GITHUB_OUTPUT
          echo "has_deletions=true" >> $GITHUB_OUTPUT
          echo "=== Debug: Deletion detection complete ==="

      - name: Delete removed members from PDS
        if: steps.check.outputs.should_run == 'true' && steps.deleted.outputs.has_deletions == 'true'
        run: |
          echo "=== Starting deletion of removed members ==="
          echo "Deleted members: ${{ steps.deleted.outputs.deleted_members }}"
          for MEMBER in ${{ steps.deleted.outputs.deleted_members }}; do
            echo "Deleting member: $MEMBER"
            python3 scripts/del_member.py "@05054.SRCLIB.C($MEMBER)"
          done
          echo "=== Deletion complete ==="
        env:
          # NOTE(review): passwords should live in `secrets`, not `vars` —
          # repository variables are stored and displayed in plain text.
          MVS_BATCH_PASSWORD: ${{ vars.MVS_BATCH_PASSWORD }}
          MVS_HOST: "oldcomputernerd.com"

      - name: Report Status
        if: steps.check.outputs.should_run == 'true' && steps.deleted.outputs.has_deletions == 'true'
        run: |
          echo "Deletion complete! Members removed from mainframe PDS."

      - name: Workflow skipped
        if: steps.check.outputs.should_run == 'false'
        run: |
          echo "Workflow skipped - no deleted source files in this commit."

View File

@ -0,0 +1,138 @@
name: MVS Submit & Execute

on:
  push:
    branches: [ master ]
    paths:
      - 'src/**'  # Trigger only if src/ (C sources) changes
      - 'jcl/**'  # Trigger only if jcl/ (JCL for batch jobs) changes
  pull_request:
    branches: [ master ]
    paths:
      - 'src/**'  # Same for pull requests
      - 'jcl/**'

jobs:
  upload-and-run:
    runs-on: ubuntu-gitea
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Full history for git diff

      # Gate: only continue when the commit added/modified/renamed .c/.bas files.
      - name: Check if workflow should run
        id: check
        run: |
          echo "Checking if there are added/modified source files..."
          if git rev-parse --verify HEAD~1 >/dev/null 2>&1; then
            # Use AMR so this gate agrees with the 'files' step below
            # (the original AM filter silently gated out pure renames).
            # '|| true' keeps the step alive under pipefail when grep finds nothing.
            CHANGED_FILES=$(git diff --name-only --diff-filter=AMR HEAD~1 2>/dev/null | grep -E '\.(c|bas)$' || true)
            if [ -z "$CHANGED_FILES" ]; then
              echo "No added/modified source files found. Skipping workflow."
              echo "should_run=false" >> $GITHUB_OUTPUT
              exit 0
            fi
            echo "Found changed files: $CHANGED_FILES"
            echo "should_run=true" >> $GITHUB_OUTPUT
          else
            echo "No parent commit, allowing workflow to run"
            echo "should_run=true" >> $GITHUB_OUTPUT
          fi

      - name: Prepare environment
        if: steps.check.outputs.should_run == 'true'
        id: setup
        run: |
          echo "=== Debug: Starting setup ==="
          echo "Current dir: $(pwd)"
          echo "Files in repo: $(ls -la)"
          # Use apt-get consistently; apt warns about an unstable CLI in scripts.
          apt-get update && apt-get install -y netcat-traditional python3-requests
          nc -h
          echo "=== Debug: Setup complete ==="

      - name: Find changed source files (added/modified only)
        if: steps.check.outputs.should_run == 'true'
        id: files
        run: |
          echo "=== Debug: Starting file detection ==="
          echo "Current dir: $(pwd)"
          echo "Files in repo: $(ls -la)"
          echo "=== Debug: Checking for parent commit ==="
          if git rev-parse --verify HEAD~1 >/dev/null 2>&1; then
            echo "Parent commit exists; running git diff for added/modified/renamed files."
            # Use --diff-filter=AMR to get Added, Modified, and Renamed files (exclude Deleted).
            # '|| true' prevents a pipefail failure when grep matches nothing.
            CHANGED_FILES=$(git diff --name-only --diff-filter=AMR HEAD~1 2>/dev/null | grep -E '\.(c|bas)$' | head -1 || true)
            echo "Added/Modified files from last commit: '${CHANGED_FILES}'"
          else
            echo "No parent commit; skipping diff."
            CHANGED_FILES=""
          fi
          echo "=== Debug: Git diff check complete ==="
          # Fallback to all .c/.bas files if no changes or no previous commit
          if [ -z "$CHANGED_FILES" ]; then
            echo "=== Debug: No added/modified files found; running fallback find ==="
            # Find newest .c/.bas by modification time (sort -nr on %T@ timestamp)
            CHANGED_FILES=$(find . -type f \( -name "*.c" -o -name "*.bas" \) -printf '%T@ %p\n' 2>/dev/null | sort -nr | cut -d' ' -f2- | head -1)
            echo "Fallback files (newest first): '${CHANGED_FILES}'"
            echo "=== Debug: Fallback complete ==="
          fi
          if [ -z "$CHANGED_FILES" ]; then
            echo "No added/modified C/BAS files found; skipping workflow."
            exit 0  # Graceful skip, no failure
          fi
          echo "=== Debug: Processing final file ==="
          echo "Final selected file: '${CHANGED_FILES}'"
          echo "file=$CHANGED_FILES" >> $GITHUB_OUTPUT
          # Extract member name (handle .c or .bas)
          EXT="${CHANGED_FILES##*.}"
          BASE=$(basename "$CHANGED_FILES" ".$EXT")
          echo "member=$BASE" >> $GITHUB_OUTPUT
          echo "=== Debug: File detection complete ==="

      - name: Upload to PDS and Submit JCL
        if: steps.check.outputs.should_run == 'true' && steps.files.outputs.file != ''
        run: |
          echo "=== Debug: Starting upload/submit ==="
          echo "File: ${{ steps.files.outputs.file }}"
          echo "Member: ${{ steps.files.outputs.member }}"
          python3 scripts/submit_job.py "${{ steps.files.outputs.file }}" "@05054.SRCLIB.C(${{ steps.files.outputs.member }})"
          echo "=== Debug: Upload/submit complete ==="
        env:
          # NOTE(review): passwords should live in `secrets`, not `vars` —
          # repository variables are stored and displayed in plain text.
          MVS_BATCH_PASSWORD: ${{ vars.MVS_BATCH_PASSWORD }}
          MVS_HOST: "oldcomputernerd.com"

      - name: Poll for job completion and retrieve output
        if: steps.check.outputs.should_run == 'true' && steps.files.outputs.file != ''
        run: |
          echo "=== Waiting for job completion ==="
          python3 scripts/poll_job.py "${{ steps.files.outputs.member }}" 120
          echo "=== Job output retrieved ==="
        env:
          MVS_CONSOLE_URL: ${{ vars.MVS_CONSOLE_URL }}
          MVS_CONSOLE_USER: ${{ vars.MVS_CONSOLE_USER }}
          MVS_CONSOLE_PASSWORD: ${{ secrets.MVS_CONSOLE_PASSWORD }}
          LINODE_SSH_HOST: ${{ vars.LINODE_SSH_HOST }}
          LINODE_PRINTOUT_DIR: ${{ vars.LINODE_PRINTOUT_DIR }}
          LOCAL_PRINTOUT_DIR: /printouts

      - name: Upload job output as artifact
        if: steps.check.outputs.should_run == 'true' && steps.files.outputs.file != ''
        # NOTE(review): upload-artifact@v3 is deprecated on github.com; kept as-is
        # because the Gitea runner may only vendor v3 — confirm before bumping.
        uses: actions/upload-artifact@v3
        with:
          name: job-output-${{ steps.files.outputs.member }}
          path: "${{ steps.files.outputs.member }}_J*.pdf"
          if-no-files-found: warn

      - name: Report Status
        if: steps.check.outputs.should_run == 'true' && steps.files.outputs.file != ''
        run: |
          echo "Build complete! Job output PDF has been archived as a build artifact."

      - name: Workflow skipped
        if: steps.check.outputs.should_run == 'false'
        run: |
          echo "Workflow skipped - no added/modified source files in this commit."

2
.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
poetry.lock

0
README.md Normal file
View File

5
jcl/TEMPLATE.jcl Normal file
View File

@ -0,0 +1,5 @@
//{NAME} JOB (GCC),'C Program',
// NOTIFY=@05054,CLASS=A,MSGCLASS=A,
// MSGLEVEL=(1,1),REGION=4M,TIME=1440
//* COMPILE-AND-GO VIA THE GCCCG PROCEDURE. {NAME} IS SUBSTITUTED
//* WITH THE UPPERCASED PROJECT NAME BY NEWJOB.SH (SED).
//STEP1 EXEC GCCCG,INFILE='@05054.SRCLIB.C({NAME})'
//

69
newjob.sh Executable file
View File

@ -0,0 +1,69 @@
#!/bin/bash
# Script to create a new C project with JCL and source file
# Usage: ./newjob.sh <NAME>
# NAME must be 8 characters or less (MVS member name restriction)
set -e

if [ $# -ne 1 ]; then
    echo "Usage: $0 <NAME>"
    echo " NAME: 8 characters or less (will be uppercased for MVS)"
    echo ""
    echo "Example:"
    echo " $0 hello"
    exit 1
fi

NAME_INPUT="$1"
# Uppercase the name for MVS member naming.
NAME=$(echo "$NAME_INPUT" | tr '[:lower:]' '[:upper:]')

# Validate name length (MVS member names are max 8 characters)
if [ ${#NAME} -gt 8 ]; then
    echo "Error: Name '$NAME' is longer than 8 characters (${#NAME} chars)"
    echo "MVS member names must be 8 characters or less"
    exit 1
fi

# Validate name format (alphanumeric, must start with letter)
if ! [[ "$NAME" =~ ^[A-Z][A-Z0-9]*$ ]]; then
    echo "Error: Name '$NAME' must start with a letter and contain only letters and numbers"
    exit 1
fi

JCL_FILE="jcl/${NAME}.jcl"
SRC_FILE="src/${NAME}.c"

# Check if files already exist
if [ -f "$JCL_FILE" ]; then
    echo "Error: JCL file '$JCL_FILE' already exists"
    exit 1
fi
if [ -f "$SRC_FILE" ]; then
    echo "Error: Source file '$SRC_FILE' already exists"
    exit 1
fi

# Create JCL from template
if [ ! -f "jcl/TEMPLATE.jcl" ]; then
    echo "Error: Template file 'jcl/TEMPLATE.jcl' not found"
    exit 1
fi

echo "Creating new C project: $NAME"
echo ""

# Ensure target directories exist; a fresh clone has no empty src/
# directory, and with `set -e` the sed/touch below would abort opaquely.
mkdir -p jcl src

# Replace {NAME} placeholders in template
sed "s/{NAME}/$NAME/g" jcl/TEMPLATE.jcl > "$JCL_FILE"
echo "✓ Created JCL: $JCL_FILE"

# Create empty C source file
touch "$SRC_FILE"
echo "✓ Created source: $SRC_FILE"
echo ""
echo "Project '$NAME' created successfully!"
echo "Next steps:"
echo " 1. Edit $SRC_FILE with your C code"
echo " 2. Commit and push to trigger mainframe build"

17
pyproject.toml Normal file
View File

@ -0,0 +1,17 @@
[project]
name = "tk5-c90-projects"
version = "0.1.0"
description = ""
authors = [
{name = "Greg Gauthier",email = "gmgauthier@protonmail.com"}
]
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
"requests (>=2.32.5,<3.0.0)"
]
[build-system]
requires = ["poetry-core>=2.0.0,<3.0.0"]
build-backend = "poetry.core.masonry.api"

96
scripts/del_member.py Normal file
View File

@ -0,0 +1,96 @@
#!/usr/bin/env python3
import sys
import subprocess
import tempfile
import os
# Force temp files into a folder inside your project
# (keeps the generated JCL scratch files out of the shared system tmp dir).
custom_temp_dir = os.path.join(os.getcwd(), "tmp")
os.makedirs(custom_temp_dir, exist_ok=True)
tempfile.tempdir = custom_temp_dir

# Hercules/MVS connection defaults.
MVSHOST = "oldcomputernerd.com"
RDRPORT = 3505  # sockdev card-reader port used for job submission
# TSO batch password; None if the env var is unset (no validation here).
MVS_PASSWORD = os.environ.get("MVS_BATCH_PASSWORD")
def create_delete_jcl(dataset_name, member_name):
    """Build a JCL job that deletes a PDS member via IEHPROGM SCRATCH.

    Args:
        dataset_name: Fully qualified PDS name, e.g. "@05054.SRCLIB.C".
        member_name: Member to delete.

    Returns:
        JCL text starting directly with the JOB card (the original payload
        began with a blank card ahead of the JOB statement).

    Raises:
        RuntimeError: if MVS_BATCH_PASSWORD is unset; otherwise the literal
            string "None" would be embedded in the JOB card and the job
            would fail authentication on the mainframe.
    """
    if not MVS_PASSWORD:
        raise RuntimeError("MVS_BATCH_PASSWORD environment variable is not set")
    # NOTE(review): SCRATCH is issued with only DSNAME/MEMBER, relying on the
    # DD1 allocation to locate the dataset — confirm against the IEHPROGM
    # reference if deletes start failing with volume errors.
    jcl = f"""\
//DELETE JOB (ACCT),'DELETE',
// USER=@05054,PASSWORD={MVS_PASSWORD},
// CLASS=A,MSGCLASS=H,NOTIFY=@05054
//DELMEM EXEC PGM=IEHPROGM
//SYSPRINT DD SYSOUT=*
//DD1 DD DSN={dataset_name},DISP=SHR
//SYSIN DD *
 SCRATCH DSNAME={dataset_name},MEMBER={member_name}
/*
"""
    return jcl
def delete_member(dataset_name, member_name, mvshost=MVSHOST):
    """Submit an IEHPROGM delete job for one PDS member to the card reader.

    Args:
        dataset_name: Fully qualified PDS name.
        member_name: Member to delete.
        mvshost: Hercules host to submit to.

    Returns:
        0 on success, 1 on failure (shell-style exit code).
    """
    payload = create_delete_jcl(dataset_name, member_name)
    try:
        # Pipe the JCL bytes straight into netcat — the original wrote them
        # to a temp file only to immediately read them back.
        result = subprocess.run(
            ['nc', '-w', '5', mvshost, str(RDRPORT)],
            input=payload.encode(),
            check=True,
            capture_output=True
        )
        print(f"Deleted {dataset_name}({member_name})")
        if result.stdout:
            print("JES response:", result.stdout.decode(errors='ignore').strip())
        return 0
    except subprocess.CalledProcessError as e:
        print(f"Deletion failed: {e}")
        print("stderr:", e.stderr.decode(errors='ignore'))
        return 1
if __name__ == "__main__":
    # CLI entry point: delete one PDS member, e.g. '@05054.SRCLIB.C(HELLO)'.
    args = sys.argv[1:]
    if not args:
        for usage_line in (
            "Usage: delete_mvs_member.py <pds_destination> [mvshost]",
            "",
            "Arguments:",
            " pds_destination - PDS destination as DATASET(MEMBER) (required)",
            " mvshost - MVS host (optional, default: oldcomputernerd.com)",
            "",
            "Examples:",
            " delete_mvs_member.py '@05054.SRCLIB.C(SIEVE11)'",
            " delete_mvs_member.py '@05054.SRCLIB.C(HELLO)' mainframe.example.com",
        ):
            print(usage_line)
        sys.exit(1)
    destination = args[0]
    # Split DATASET(MEMBER) at the first parenthesis.
    paren = destination.find('(')
    if paren == -1 or not destination.endswith(')'):
        print(f"Error: Invalid PDS syntax '{destination}'. Use format: DATASET(MEMBER)")
        sys.exit(1)
    dataset_name = destination[:paren]
    member_name = destination[paren + 1:-1]
    # Optional host override
    mvshost = args[1] if len(args) > 1 else MVSHOST
    print(f"Deleting: {dataset_name}({member_name})")
    print(f"Host: {mvshost}")
    print()
    sys.exit(delete_member(dataset_name, member_name, mvshost))

218
scripts/poll_job.py Executable file
View File

@ -0,0 +1,218 @@
#!/usr/bin/env python3
import sys
import os
import re
import time
import subprocess
import requests
from requests.auth import HTTPBasicAuth
# Hercules HTTP console endpoint used to scrape the MVS syslog.
CONSOLE_URL = os.environ.get("MVS_CONSOLE_URL", "http://oldcomputernerd.com:8038/cgi-bin/tasks/syslog")
CONSOLE_USER = os.environ.get("MVS_CONSOLE_USER", "gmgauthier")
# No default: None if the env var is unset (basic-auth will then be sent
# with a None password — fetches will fail in get_syslog()).
CONSOLE_PASS = os.environ.get("MVS_CONSOLE_PASSWORD")
# SSH target and directory where the virtual 1403 printer drops PDF output.
LINODE_HOST = os.environ.get("LINODE_SSH_HOST", "gmgauthier@socrates")
LINODE_PRINTOUT_DIR = os.environ.get("LINODE_PRINTOUT_DIR", "/home/gmgauthier/printouts")
def get_syslog():
    """Return the Hercules console syslog text, or None if the fetch fails."""
    credentials = HTTPBasicAuth(CONSOLE_USER, CONSOLE_PASS)
    try:
        resp = requests.get(CONSOLE_URL, auth=credentials, timeout=10)
        resp.raise_for_status()
    except requests.RequestException as e:
        print(f"Failed to fetch syslog: {e}")
        return None
    return resp.text
def find_job_number(syslog, jobname):
    """Return the JES2 job number from the $HASP100 message for jobname, or None.

    Example matched line: /12.28.02 JOB 257 $HASP100 SIMPLE2 ON READER1
    """
    hasp100 = rf'/\d+\.\d+\.\d+\s+JOB\s+(\d+)\s+\$HASP100\s+{jobname}\s+ON\s+READER'
    found = re.search(hasp100, syslog, re.IGNORECASE)
    return found.group(1) if found else None
def check_job_ended(syslog, jobname, job_number):
    """True once the syslog contains $HASP395 <jobname> ENDED for this job number.

    Example matched line: /18.24.41 JOB 276 $HASP395 GMG0001 ENDED
    """
    ended = rf'/\d+\.\d+\.\d+\s+JOB\s+{job_number}\s+\$HASP395\s+{jobname}\s+ENDED'
    return bool(re.search(ended, syslog, re.IGNORECASE))
def check_job_printed(syslog, jobname, job_number):
    """True once the syslog shows $HASP150 <jobname> ON PRINTER for this job.

    Example matched line: /12.28.03 JOB 257 $HASP150 SIMPLE2 ON PRINTER1
    """
    printed = rf'/\d+\.\d+\.\d+\s+JOB\s+{job_number}\s+\$HASP150\s+{jobname}\s+ON\s+PRINTER'
    return bool(re.search(printed, syslog, re.IGNORECASE))
def list_pdfs_local(local_dir):
    """Return v1403-*.pdf paths under local_dir, newest first by mtime."""
    import glob
    matches = glob.glob(f"{local_dir}/v1403-*.pdf")
    return sorted(matches, key=os.path.getmtime, reverse=True)
def list_pdfs_remote():
    """Return remote printer PDFs via ssh, newest first (ls -t ordering)."""
    listing_cmd = f"ssh {LINODE_HOST} ls -t {LINODE_PRINTOUT_DIR}/v1403-*.pdf"
    try:
        proc = subprocess.run(
            listing_cmd,
            shell=True,
            check=True,
            capture_output=True,
            text=True
        )
    except subprocess.CalledProcessError:
        # ls exits non-zero when no files match; treat as empty.
        return []
    return proc.stdout.strip().split('\n')
def find_pdf_for_job(job_number, jname, local_printout_dir=None):
    """Locate the printer PDF whose filename embeds J<number>_<jobname>.

    Checks a locally mounted printout directory first (CI case); when that
    directory is given and exists, the remote path is NOT consulted.
    Returns the matching path or None.
    """
    needle = f"v1403-J{job_number}_{jname}-"
    if local_printout_dir and os.path.isdir(str(local_printout_dir)):
        # Mounted-volume path: search locally only, never fall through.
        for candidate in list_pdfs_local(local_printout_dir):
            if needle in candidate:
                return candidate
        return None
    # Fall back to remote SSH access
    for candidate in list_pdfs_remote():
        if needle in candidate:
            return candidate
    return None
def retrieve_pdf(source_path, local_filename, is_local=False):
    """Retrieve a job-output PDF by local copy or scp.

    Args:
        source_path: Path to the PDF (local path, or path on LINODE_HOST).
        local_filename: Destination filename.
        is_local: True when source_path is on a locally mounted volume.

    Returns:
        True on success, False on failure.
    """
    try:
        if is_local:
            # Local copy from a mounted volume
            import shutil
            shutil.copy2(source_path, local_filename)
            print(f"Copied: {local_filename}")
        else:
            # Argument-list form instead of shell=True: paths containing
            # spaces or shell metacharacters can neither break the command
            # nor inject into it.
            subprocess.run(
                ['scp', f'{LINODE_HOST}:{source_path}', local_filename],
                check=True
            )
            print(f"Retrieved: {local_filename}")
        return True
    except (subprocess.CalledProcessError, IOError) as e:
        print(f"Failed to retrieve PDF: {e}")
        return False
def poll_for_job(jn, to=300, poll_interval=5):
    """Poll the console for job completion and retrieve PDF.

    Three phases, all bounded by ONE shared timeout `to`:
      1. wait for $HASP100 (job seen on the reader) to learn the job number,
      2. wait for $HASP395 ENDED and note whether $HASP150 (printed) appeared,
      3. locate and retrieve the printer PDF (local mount or remote scp).

    Args:
        jn: Job name (uppercased for matching).
        to: Overall timeout in seconds across phases 1+2.
        poll_interval: Seconds between syslog fetches.

    Returns:
        0 on success (PDF retrieved), 1 on any timeout or failure.
    """
    jobname_upper = jn.upper()
    start_time = time.time()
    job_number = None
    print(f"Polling for job: {jobname_upper}")
    print(f"Timeout: {to}s, Poll interval: {poll_interval}s")
    print(f"Console URL: {CONSOLE_URL}")
    print(f"Console User: {CONSOLE_USER}")
    print(f"Console Pass: {'***' if CONSOLE_PASS else 'NOT SET'}")
    print()
    # Phase 1: Find a job number
    print("Phase 1: Looking for job submission ($HASP100)...")
    while time.time() - start_time < to:
        syslog = get_syslog()
        if not syslog:
            # Fetch failed; keep retrying until the overall timeout expires.
            time.sleep(poll_interval)
            continue
        job_number = find_job_number(syslog, jobname_upper)
        if job_number:
            print(f"Found job number: J{job_number}")
            break
        time.sleep(poll_interval)
    if not job_number:
        print(f"Timeout: Job {jobname_upper} not found in console after {to}s")
        return 1
    # Phase 2: Wait for completion
    # start_time is deliberately NOT reset: `to` bounds phases 1+2 combined.
    print(f"Phase 2: Waiting for job completion ($HASP395 ENDED)...")
    job_ended = False
    job_printed = False
    while time.time() - start_time < to:
        syslog = get_syslog()
        if not syslog:
            time.sleep(poll_interval)
            continue
        job_ended = check_job_ended(syslog, jobname_upper, job_number)
        job_printed = check_job_printed(syslog, jobname_upper, job_number)
        if job_ended:
            print(f"Job J{job_number} has ended")
            break
        time.sleep(poll_interval)
    if not job_ended:
        print(f"Timeout: Job J{job_number} did not complete after {to}s")
        return 1
    # Check if output was printed (required for PDF retrieval)
    # NOTE(review): job_printed comes from the same syslog snapshot that
    # showed ENDED; if $HASP150 is logged just after $HASP395 this could
    # spuriously report "not printed" — confirm message ordering on TK5.
    if not job_printed:
        print(f"ERROR: Job J{job_number} completed but no output was printed ($HASP150 not found)")
        print(f"This usually means MSGCLASS=H (hold) was used in the JCL")
        print(f"Check TSO SDSF or console for job output manually")
        print(f"To fix: Change JCL to use MSGCLASS=A for automatic printing")
        return 1
    print(f"Job J{job_number} completed and output printed!")
    # Phase 3: Retrieve PDF
    print("Phase 3: Retrieving PDF...")
    # Give the PDF a moment to be written to disk
    time.sleep(2)
    # Check for local mounted directory (CI environment)
    local_printout_dir = os.environ.get("LOCAL_PRINTOUT_DIR")
    is_local = local_printout_dir and os.path.isdir(local_printout_dir)
    if is_local:
        print(f"Using local mounted directory: {local_printout_dir}")
    pdf_path = find_pdf_for_job(job_number, jobname_upper, local_printout_dir)
    if not pdf_path:
        print(f"Error: PDF not found for J{job_number}_{jobname_upper}")
        return 1
    local_filename = f"{jobname_upper}_J{job_number}.pdf"
    if retrieve_pdf(pdf_path, local_filename, is_local):
        print(f"Success! Job output saved to: {local_filename}")
        return 0
    else:
        return 1
if __name__ == "__main__":
    # CLI entry point: poll for <jobname>, optionally overriding the timeout.
    argv = sys.argv[1:]
    if not argv:
        for usage_line in (
            "Usage: poll_job.py <jobname> [timeout_seconds]",
            "",
            "Arguments:",
            " jobname - Job name to poll for (required)",
            " timeout_seconds - Maximum time to wait (optional, default: 300)",
            "",
            "Example:",
            " poll_job.py SIMPLE2",
            " poll_job.py SIMPLE2 600",
        ):
            print(usage_line)
        sys.exit(1)
    jobname = argv[0]
    timeout = int(argv[1]) if len(argv) > 1 else 300
    sys.exit(poll_for_job(jobname, timeout))

210
scripts/submit_job.py Executable file
View File

@ -0,0 +1,210 @@
#!/usr/bin/env python3
import sys
import subprocess
import tempfile
import os
import time
import socket
# Force temp files into a folder inside your project (fully owned by you)
custom_temp_dir = os.path.join(os.getcwd(), "tmp")
os.makedirs(custom_temp_dir, exist_ok=True)
tempfile.tempdir = custom_temp_dir

# Repository layout: C sources in src/, batch-job JCL in jcl/.
SRCLIB = "src"
JCLLIB = "jcl"
# Hercules host and sockdev card-reader port for job submission.
MVSHOST = "oldcomputernerd.com"
RDRPORT = 3505
# TSO batch password; None if the env var is unset (no validation here).
MVS_PASSWORD = os.environ.get("MVS_BATCH_PASSWORD")
def wait_for_reader(host, port, wait_seconds=10):
    """Pause so the card reader can finish ingesting the previous job.

    Hercules keeps the reader socket in an open state while JES processes
    the submitted job, so a fixed delay is used instead of probing the
    port — the port answers even while Hercules would still reject a new
    connection. The host/port arguments are accepted for interface
    symmetry; only wait_seconds affects behavior. Always returns True.
    """
    print(f"Waiting {wait_seconds} seconds for card reader to finish processing...")
    elapsed = 0
    while elapsed < wait_seconds:
        time.sleep(1)
        elapsed += 1
        if elapsed % 3 == 0:
            # Progress note every third second.
            print(f" {wait_seconds - elapsed} seconds remaining...")
    print("Card reader should be ready now")
    return True
def create_jcl_payload(local_file, dataset_name, member_name):
    """Build an IEBUPDTE job that stores local_file as dataset_name(member_name).

    The source lines become the instream SYSIN data of a './ ADD' request.
    Each line is truncated to 80 columns and padded to a full card image.
    """
    with open(local_file, 'r') as f:
        sysin = f.readlines()
    # PDS member: Use IEBUPDTE
    # NOTE(review): the f-string starts with a newline, so the payload begins
    # with one blank card ahead of the JOB statement — confirm the reader/JES
    # tolerates this.
    jcl = f"""
//UPLOAD JOB (ACCT),'UPLOAD',
// USER=@05054,PASSWORD={MVS_PASSWORD},
// CLASS=A,MSGCLASS=H,NOTIFY=@05054
//COPY EXEC PGM=IEBUPDTE,PARM=NEW
//SYSPRINT DD SYSOUT=*
//SYSUT1 DD DUMMY
//SYSUT2 DD DSN={dataset_name},DISP=MOD,UNIT=SYSDA,
// DCB=(RECFM=FB,LRECL=80,BLKSIZE=0)
//SYSIN DD *
"""
    # Append control statement, source lines, end, and terminator (no leading space on ./)
    jcl += f"./ ADD NAME={member_name}\n"
    for line in sysin:
        line = line.rstrip('\n')
        stripped = line.lstrip()
        # Skip comment lines that would be interpreted as JCL
        # NOTE(review): this silently DROPS any source line beginning with
        # '//' or '/*' (e.g. a C block comment at column 1), because '/*'
        # would terminate the instream SYSIN data — the uploaded member will
        # not match the local file in that case. A DLM= delimiter on the
        # SYSIN DD would avoid the loss; verify DLM support on this system.
        if stripped.startswith('//') or stripped.startswith('/*'):
            continue
        # Card image: truncate to 80 columns, pad with blanks.
        jcl += line[:80].ljust(80) + "\n"
    jcl += "./ ENDUP\n"
    jcl += "/*\n"
    return jcl
def upload_source(local_file, dataset_name, member_name, mvshost=MVSHOST):
    """Upload source code to an MVS PDS member via the card reader.

    Builds the IEBUPDTE job with create_jcl_payload() and pipes it into
    netcat on the reader port.

    Returns:
        0 on success, 1 on failure (shell-style exit code).
    """
    payload = create_jcl_payload(local_file, dataset_name, member_name)
    try:
        # Feed the JCL bytes directly to nc — the original wrote them to a
        # temp file only to immediately read them back.
        result = subprocess.run(
            ['nc', '-w', '5', mvshost, str(RDRPORT)],
            input=payload.encode(),
            check=True,
            capture_output=True
        )
        print(f"Uploaded {local_file} to {dataset_name}({member_name})")
        if result.stdout:
            print("JES response:", result.stdout.decode(errors='ignore').strip())
        return 0
    except subprocess.CalledProcessError as e:
        print(f"Upload failed: {e}")
        print("stderr:", e.stderr.decode(errors='ignore'))
        return 1
def submit_jcl(job, mvshost="oldcomputernerd.com"):
    """Submit jcl/<JOB>.jcl to the MVS card reader.

    Args:
        job: Job/member base name; resolved to jcl/<JOB-uppercased>.jcl.
        mvshost: Hercules host to submit to.

    Returns:
        0 on success; 1 when the JCL file is missing or netcat fails.
    """
    subjcl = os.path.join(JCLLIB, f"{job.upper()}.jcl")
    if not os.path.exists(subjcl):
        print(f"JCL file {subjcl} not found")
        return 1
    try:
        # Read the JCL file and send via netcat (same approach as upload_source)
        with open(subjcl, 'rb') as f:
            jcl_data = f.read()
        print(f"Submitting {len(jcl_data)} bytes of JCL to {mvshost}:{RDRPORT}")
        # check=False so the non-zero exit is handled explicitly below.
        # (The original used check=True, which raised before ever reaching
        # its own returncode test — that warning branch was dead code.)
        result = subprocess.run(
            ['nc', '-w', '5', mvshost, str(RDRPORT)],
            input=jcl_data,
            check=False,
            capture_output=True
        )
        print(f"Submitted JCL job: {job}")
        if result.stdout:
            print("JES response:", result.stdout.decode(errors='ignore').strip())
        if result.stderr:
            print("netcat stderr:", result.stderr.decode(errors='ignore').strip())
        if result.returncode != 0:
            print(f"WARNING: netcat returned non-zero exit code: {result.returncode}")
            return 1
        return 0
    except FileNotFoundError as e:
        # Fires if the JCL file vanishes between the existence check and the
        # open, or if the nc binary itself is not installed.
        print(f"Error reading JCL file: {e}")
        return 1
if __name__ == "__main__":
    # CLI entry point: upload a source file to the mainframe PDS, then
    # submit the matching jcl/<NAME>.jcl batch job for it.
    if len(sys.argv) < 2:
        print("Usage: mvs_job.py <local_source_file> [destination_pds] [mvshost]")
        print()
        print("Arguments:")
        print(" local_source_file - Path to source file (required)")
        print(" destination_pds - PDS destination as DATASET(MEMBER) (optional)")
        print(" Default: @05054.C90.SOURCE(basename)")
        print(" mvshost - MVS host (optional, default: oldcomputernerd.com)")
        print()
        print("Examples:")
        print(" mvs_job.py src/sieve11.c")
        print(" mvs_job.py src/sieve11.c '@05054.C90.SOURCE(SIEVE11)'")
        print(" mvs_job.py src/hello.c '@05054.C90.SOURCE(HELLO)' mainframe.example.com")
        print()
        print("Notes:")
        print(" - JCL file is assumed to be jcl/<basename>.jcl")
        print(" - Member name defaults to source filename without extension")
        sys.exit(1)
    local_file = sys.argv[1]
    # Extract base name without extension for defaults
    basename = os.path.splitext(os.path.basename(local_file))[0].upper()
    # Recognized low-level qualifiers for host source PDSes; the first
    # entry (C) is used as the default when no destination is given.
    valid_host_source_pds_suffixes = ['C', 'ALG', 'ASM', 'BAS', 'COB', 'PAS', 'PL360']
    default_suffix = valid_host_source_pds_suffixes[0]
    # Parse destination PDS (optional second argument)
    if len(sys.argv) > 2 and sys.argv[2]:
        destination = sys.argv[2]
        # Parse PDS syntax: DATASET(MEMBER)
        if '(' in destination and destination.endswith(')'):
            dataset_name = destination[:destination.index('(')]
            member_name = destination[destination.index('(')+1:-1]
        else:
            print(f"Error: Invalid PDS syntax '{destination}'. Use format: DATASET(MEMBER)")
            sys.exit(1)
    else:
        # Default destination
        dataset_name = f"@05054.SRCLIB.{default_suffix}"
        member_name = basename.upper()
    # JCL job name defaults to basename
    job = basename.upper()
    # Optional host override
    mvshost = sys.argv[3] if len(sys.argv) > 3 else MVSHOST
    print(f"Source: {local_file}")
    print(f"Destination: {dataset_name}({member_name})")
    print(f"JCL: jcl/{job}.jcl")
    print(f"Host: {mvshost}")
    print()
    # Step 1: Upload source to PDS
    if upload_source(local_file, dataset_name, member_name, mvshost) != 0:
        sys.exit(1)
    # Wait for card reader to finish processing upload job before submitting compile job
    # This prevents "device busy or interrupt pending" errors from Hercules
    wait_for_reader(mvshost, RDRPORT, wait_seconds=10)
    # Step 2: Submit JCL job
    exit_code = submit_jcl(job, mvshost)
    sys.exit(exit_code)