diff --git a/.github/workflows/community-report.yml b/.github/workflows/community-report.yml new file mode 100644 index 0000000000..88acb4c701 --- /dev/null +++ b/.github/workflows/community-report.yml @@ -0,0 +1,188 @@ +name: Generate Weekly Community Report 📊 + +on: + schedule: + - cron: '0 12 * * 1' # Run at 12:00 UTC on Monday + workflow_dispatch: + inputs: + days: + description: 'Number of days to look back for the report' + required: true + default: '7' + +jobs: + generate-report: + name: Generate Report 📝 + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: read + discussions: read + contents: read + id-token: write + + steps: + - name: Generate GitHub App Token 🔑 + id: generate_token + uses: actions/create-github-app-token@v1 + with: + app-id: ${{ secrets.APP_ID }} + private-key: ${{ secrets.PRIVATE_KEY }} + + - name: Generate Report 📜 + id: report + env: + GH_TOKEN: ${{ steps.generate_token.outputs.token }} + REPO: ${{ github.repository }} + DAYS: ${{ github.event.inputs.days || '7' }} + run: | + set -e + + START_DATE=$(date -u -d "$DAYS days ago" +'%Y-%m-%d') + END_DATE=$(date -u +'%Y-%m-%d') + echo "⏳ Generating report for contributions from $START_DATE to $END_DATE..." + + declare -A author_is_googler + check_googler_status() { + local author=$1 + if [[ "$author" == *"[bot]" ]]; then + author_is_googler[$author]=1 + return 1 + fi + if [[ -v "author_is_googler[$author]" ]]; then + return ${author_is_googler[$author]} + fi + + if gh api "orgs/googlers/members/$author" --silent 2>/dev/null; then + echo "🧑‍💻 $author is a Googler." + author_is_googler[$author]=0 + else + echo "🌍 $author is a community contributor." + author_is_googler[$author]=1 + fi + return ${author_is_googler[$author]} + } + + googler_issues=0 + non_googler_issues=0 + googler_prs=0 + non_googler_prs=0 + + echo "🔎 Fetching issues and pull requests..." 
+ ITEMS_JSON=$(gh search issues --repo "$REPO" "created:>$START_DATE" --json author,isPullRequest --limit 1000) + + for row in $(echo "${ITEMS_JSON}" | jq -r '.[] | @base64'); do + _jq() { + echo ${row} | base64 --decode | jq -r ${1} + } + author=$(_jq '.author.login') + is_pr=$(_jq '.isPullRequest') + + if [[ -z "$author" || "$author" == "null" ]]; then + continue + fi + + if check_googler_status "$author"; then + if [[ "$is_pr" == "true" ]]; then + googler_prs=$((googler_prs + 1)) + else + googler_issues=$((googler_issues + 1)) + fi + else + if [[ "$is_pr" == "true" ]]; then + non_googler_prs=$((non_googler_prs + 1)) + else + non_googler_issues=$((non_googler_issues + 1)) + fi + fi + done + + googler_discussions=0 + non_googler_discussions=0 + + echo "🗣️ Fetching discussions..." + DISCUSSION_QUERY=''' + query($q: String!) { + search(query: $q, type: DISCUSSION, first: 100) { + nodes { + ... on Discussion { + author { + login + } + } + } + } + }''' + DISCUSSIONS_JSON=$(gh api graphql -f q="repo:$REPO created:>$START_DATE" -f query="$DISCUSSION_QUERY") + + for row in $(echo "${DISCUSSIONS_JSON}" | jq -r '.data.search.nodes[] | @base64'); do + _jq() { + echo ${row} | base64 --decode | jq -r ${1} + } + author=$(_jq '.author.login') + + if [[ -z "$author" || "$author" == "null" ]]; then + continue + fi + + if check_googler_status "$author"; then + googler_discussions=$((googler_discussions + 1)) + else + non_googler_discussions=$((non_googler_discussions + 1)) + fi + done + + echo "✍️ Generating report content..." 
+ REPORT_TITLE="Community Contribution Report: $START_DATE to $END_DATE" + TOTAL_ISSUES=$((googler_issues + non_googler_issues)) + TOTAL_PRS=$((googler_prs + non_googler_prs)) + TOTAL_DISCUSSIONS=$((googler_discussions + non_googler_discussions)) + + REPORT_BODY=$(cat <> $GITHUB_OUTPUT + echo "$REPORT_BODY" >> $GITHUB_OUTPUT + echo "EOF" >> $GITHUB_OUTPUT + + echo "📊 Community Contribution Report:" + echo "$REPORT_BODY" + + - name: 🤖 Get Insights from Report + if: steps.report.outputs.report_body != '' + uses: google-gemini/gemini-cli-action@41c0f1b3cbd1a0b284251bd1aac034edd07a3a2f + env: + GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }} + with: + version: 0.1.8-rc.0 + GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }} + OTLP_GCP_WIF_PROVIDER: ${{ secrets.OTLP_GCP_WIF_PROVIDER }} + OTLP_GCP_SERVICE_ACCOUNT: ${{ secrets.OTLP_GCP_SERVICE_ACCOUNT }} + OTLP_GOOGLE_CLOUD_PROJECT: ${{ secrets.OTLP_GOOGLE_CLOUD_PROJECT }} + settings_json: | + { + "coreTools": [ + "run_shell_command(gh issue list)", + "run_shell_command(gh pr list)", + "run_shell_command(gh search issues)", + "run_shell_command(gh search prs)" + ] + } + prompt: | + You are a helpful assistant that analyzes community contribution reports. + Based on the following report, please provide a brief summary and highlight any interesting trends or potential areas for improvement. + + Report: + ${{ steps.report.outputs.report_body }} diff --git a/README.md b/README.md index c7092e4064..d7e2b3e619 100644 --- a/README.md +++ b/README.md @@ -38,9 +38,9 @@ With the Gemini CLI you can: You are now ready to use the Gemini CLI! 
-### For advanced use or increased limits: +### Use a Gemini API key: -If you need to use a specific model or require a higher request capacity, you can use an API key: +The Gemini API provides a free tier with [100 requests per day](https://ai.google.dev/gemini-api/docs/rate-limits#free-tier) using Gemini 2.5 Pro, control over which model you use, and access to higher rate limits (with a paid plan): 1. Generate a key from [Google AI Studio](https://aistudio.google.com/apikey). 2. Set it as an environment variable in your terminal. Replace `YOUR_API_KEY` with your generated key. @@ -49,6 +49,8 @@ If you need to use a specific model or require a higher request capacity, you ca export GEMINI_API_KEY="YOUR_API_KEY" ``` +3. (Optionally) Upgrade your Gemini API project to a paid plan on the API key page (will automatically unlock [Tier 1 rate limits](https://ai.google.dev/gemini-api/docs/rate-limits#tier-1)) + For other authentication methods, including Google Workspace accounts, see the [authentication](./docs/cli/authentication.md) guide. ## Examples diff --git a/packages/cli/package.json b/packages/cli/package.json index e4a6a7ee54..e5f88f97e2 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -10,7 +10,6 @@ }, "scripts": { "build": "node ../../scripts/build_package.js", - "clean": "rm -rf dist", "start": "node dist/index.js", "debug": "node --inspect-brk dist/index.js", "lint": "eslint . 
--ext .ts,.tsx", diff --git a/packages/cli/src/gemini.tsx b/packages/cli/src/gemini.tsx index 4a0014e1af..cc1f41b6ee 100644 --- a/packages/cli/src/gemini.tsx +++ b/packages/cli/src/gemini.tsx @@ -18,6 +18,7 @@ import { LoadedSettings, loadSettings, SettingScope, + USER_SETTINGS_PATH, } from './config/settings.js'; import { themeManager } from './ui/themes/theme-manager.js'; import { getStartupWarnings } from './utils/startupWarnings.js'; @@ -279,7 +280,7 @@ async function validateNonInterActiveAuth( // still expect that exists if (!selectedAuthType && !process.env.GEMINI_API_KEY) { console.error( - 'Please set an Auth method in your .gemini/settings.json OR specify GEMINI_API_KEY env variable file before running', + `Please set an Auth method in your ${USER_SETTINGS_PATH} OR specify GEMINI_API_KEY env variable file before running`, ); process.exit(1); } diff --git a/packages/cli/src/ui/components/shared/MaxSizedBox.tsx b/packages/cli/src/ui/components/shared/MaxSizedBox.tsx index 8880e89429..c803b680fa 100644 --- a/packages/cli/src/ui/components/shared/MaxSizedBox.tsx +++ b/packages/cli/src/ui/components/shared/MaxSizedBox.tsx @@ -278,7 +278,10 @@ function visitBoxRow(element: React.ReactNode): Row { // Allow the key prop, which is automatically added by React. maxExpectedProps += 1; } - if (boxProps.flexDirection !== 'row') { + if ( + boxProps.flexDirection !== undefined && + boxProps.flexDirection !== 'row' + ) { debugReportError( 'MaxSizedBox children must have flexDirection="row".', element, diff --git a/packages/core/package.json b/packages/core/package.json index 08e5a925f2..a73c973823 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -7,7 +7,6 @@ "main": "dist/index.js", "scripts": { "build": "node ../../scripts/build_package.js", - "clean": "rm -rf dist", "lint": "eslint . 
--ext .ts,.tsx", "format": "prettier --write .", "test": "vitest run", diff --git a/packages/core/src/tools/read-many-files.test.ts b/packages/core/src/tools/read-many-files.test.ts index 666c484c03..697b7d1ba4 100644 --- a/packages/core/src/tools/read-many-files.test.ts +++ b/packages/core/src/tools/read-many-files.test.ts @@ -14,6 +14,33 @@ import fs from 'fs'; // Actual fs for setup import os from 'os'; import { Config } from '../config/config.js'; +vi.mock('mime-types', () => { + const lookup = (filename: string) => { + if (filename.endsWith('.ts') || filename.endsWith('.js')) { + return 'text/plain'; + } + if (filename.endsWith('.png')) { + return 'image/png'; + } + if (filename.endsWith('.pdf')) { + return 'application/pdf'; + } + if (filename.endsWith('.mp3') || filename.endsWith('.wav')) { + return 'audio/mpeg'; + } + if (filename.endsWith('.mp4') || filename.endsWith('.mov')) { + return 'video/mp4'; + } + return false; + }; + return { + default: { + lookup, + }, + lookup, + }; +}); + describe('ReadManyFilesTool', () => { let tool: ReadManyFilesTool; let tempRootDir: string; diff --git a/packages/core/src/utils/fileUtils.test.ts b/packages/core/src/utils/fileUtils.test.ts index 4f4c7c1ee5..0455b6e1e0 100644 --- a/packages/core/src/utils/fileUtils.test.ts +++ b/packages/core/src/utils/fileUtils.test.ts @@ -211,6 +211,16 @@ describe('fileUtils', () => { expect(detectFileType('file.pdf')).toBe('pdf'); }); + it('should detect audio type by extension', () => { + mockMimeLookup.mockReturnValueOnce('audio/mpeg'); + expect(detectFileType('song.mp3')).toBe('audio'); + }); + + it('should detect video type by extension', () => { + mockMimeLookup.mockReturnValueOnce('video/mp4'); + expect(detectFileType('movie.mp4')).toBe('video'); + }); + it('should detect known binary extensions as binary (e.g. 
.zip)', () => { mockMimeLookup.mockReturnValueOnce('application/zip'); expect(detectFileType('archive.zip')).toBe('binary'); @@ -427,5 +437,23 @@ describe('fileUtils', () => { ); expect(result.isTruncated).toBe(true); }); + + it('should return an error if the file size exceeds 20MB', async () => { + // Create a file just over 20MB + const twentyOneMB = 21 * 1024 * 1024; + const buffer = Buffer.alloc(twentyOneMB, 0x61); // Fill with 'a' + actualNodeFs.writeFileSync(testTextFilePath, buffer); + + const result = await processSingleFileContent( + testTextFilePath, + tempRootDir, + ); + + expect(result.error).toContain('File size exceeds the 20MB limit'); + expect(result.returnDisplay).toContain( + 'File size exceeds the 20MB limit', + ); + expect(result.llmContent).toContain('File size exceeds the 20MB limit'); + }); }); }); diff --git a/packages/core/src/utils/fileUtils.ts b/packages/core/src/utils/fileUtils.ts index cb4d333ad4..5a05d5134b 100644 --- a/packages/core/src/utils/fileUtils.ts +++ b/packages/core/src/utils/fileUtils.ts @@ -94,19 +94,27 @@ export function isBinaryFile(filePath: string): boolean { /** * Detects the type of file based on extension and content. * @param filePath Path to the file. - * @returns 'text', 'image', 'pdf', or 'binary'. + * @returns 'text', 'image', 'pdf', 'audio', 'video', or 'binary'. 
*/ export function detectFileType( filePath: string, -): 'text' | 'image' | 'pdf' | 'binary' { +): 'text' | 'image' | 'pdf' | 'audio' | 'video' | 'binary' { const ext = path.extname(filePath).toLowerCase(); const lookedUpMimeType = mime.lookup(filePath); // Returns false if not found, or the mime type string - if (lookedUpMimeType && lookedUpMimeType.startsWith('image/')) { - return 'image'; - } - if (lookedUpMimeType && lookedUpMimeType === 'application/pdf') { - return 'pdf'; + if (lookedUpMimeType) { + if (lookedUpMimeType.startsWith('image/')) { + return 'image'; + } + if (lookedUpMimeType.startsWith('audio/')) { + return 'audio'; + } + if (lookedUpMimeType.startsWith('video/')) { + return 'video'; + } + if (lookedUpMimeType === 'application/pdf') { + return 'pdf'; + } } // Stricter binary check for common non-text extensions before content check @@ -187,7 +195,7 @@ export async function processSingleFileContent( error: `File not found: ${filePath}`, }; } - const stats = fs.statSync(filePath); // Sync check + const stats = await fs.promises.stat(filePath); if (stats.isDirectory()) { return { llmContent: '', @@ -196,6 +204,19 @@ export async function processSingleFileContent( }; } + const fileSizeInBytes = stats.size; + // 20MB limit + const maxFileSize = 20 * 1024 * 1024; + + if (fileSizeInBytes > maxFileSize) { + throw new Error( + `File size exceeds the 20MB limit: ${filePath} (${( + fileSizeInBytes / + (1024 * 1024) + ).toFixed(2)}MB)`, + ); + } + const fileType = detectFileType(filePath); const relativePathForDisplay = path .relative(rootDirectory, filePath) @@ -253,7 +274,9 @@ export async function processSingleFileContent( }; } case 'image': - case 'pdf': { + case 'pdf': + case 'audio': + case 'video': { const contentBuffer = await fs.promises.readFile(filePath); const base64Data = contentBuffer.toString('base64'); return { diff --git a/scripts/clean.js b/scripts/clean.js index 196384b0a2..d53d097ead 100644 --- a/scripts/clean.js +++ b/scripts/clean.js @@ 
-17,10 +17,10 @@ // See the License for the specific language governing permissions and // limitations under the License. -import { execSync } from 'child_process'; -import { rmSync } from 'fs'; +import { rmSync, readFileSync } from 'fs'; import { dirname, join } from 'path'; import { fileURLToPath } from 'url'; +import { globSync } from 'glob'; const __dirname = dirname(fileURLToPath(import.meta.url)); const root = join(__dirname, '..'); @@ -32,4 +32,16 @@ rmSync(join(root, 'packages/cli/src/generated/'), { recursive: true, force: true, }); -execSync('npm run clean --workspaces', { stdio: 'inherit', cwd: root }); +const RMRF_OPTIONS = { recursive: true, force: true }; +rmSync(join(root, 'bundle'), RMRF_OPTIONS); +// Dynamically clean dist directories in all workspaces +const rootPackageJson = JSON.parse( + readFileSync(join(root, 'package.json'), 'utf-8'), +); +for (const workspace of rootPackageJson.workspaces) { + const packages = globSync(join(workspace, 'package.json'), { cwd: root }); + for (const pkgPath of packages) { + const pkgDir = dirname(join(root, pkgPath)); + rmSync(join(pkgDir, 'dist'), RMRF_OPTIONS); + } +}