diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index a8c211c..40f589d 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -2,13 +2,12 @@ name: Python package
 
 on:
   push:
-    branches: [ "main", "master" ]
+    branches: [ "main" ]
   pull_request:
-    branches: [ "main", "master" ]
+    branches: [ "main" ]
 
 jobs:
   build:
-
     runs-on: ${{ matrix.os-version }}
     name: ${{ matrix.os-version }} (${{ matrix.python-version }})
 
@@ -16,38 +15,35 @@ jobs:
       fail-fast: false
       matrix:
        os-version: ["ubuntu-latest"]
-       python-version: ["3.9", "3.10", "3.11"] #, "3.12"]
+       python-version: ["3.11"]
 
     steps:
-    - uses: actions/checkout@v3
-    - name: "Set up Python ${{ matrix.python-version }}"
-      uses: actions/setup-python@v3
-      with:
-        python-version: ${{ matrix.python-version }}
-        cache: 'pip'
-    - name: "Install flake8"
-      run: |
-        pip install flake8
-    - name: "Lint with flake8"
-      run: |
-        # stop the build if there are Python syntax errors or undefined names
-        flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
-        # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
-        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
-    - name: "Install miniconda"
-      uses: conda-incubator/setup-miniconda@v3
-      with:
-        miniconda-version: "latest"
-        auto-update-conda: true
-        python-version: ${{ matrix.python-version }}
-        channels: conda-forge,bioconda
-        environment-file: environment.yml
-    # - name: "Install pytest"
-    #   shell: bash -l {0}
-    #   run: |
-    #     python -m pip install --upgrade pip
-    #     pip install setuptools wheel build pytest
-    # - name: "Test with pytest"
-    #   shell: bash -l {0}
-    #   run: |
-    #     pytest -s
+      - uses: actions/checkout@v3
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v3
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Cache pip dependencies
+        uses: actions/cache@v4
+        with:
+          path: ~/.cache/pip
+          key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('**/requirements.txt', 'environment.yml') }}
+
+      - name: Install flake8
+        run: pip install flake8
+
+      - name: Lint with flake8
+        run: |
+          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+
+      - name: Install miniconda
+        uses: conda-incubator/setup-miniconda@v3
+        with:
+          miniconda-version: "latest"
+          auto-update-conda: true
+          python-version: ${{ matrix.python-version }}
+          channels: conda-forge,bioconda
+          environment-file: environment.yml
diff --git a/pyproject.toml b/pyproject.toml
index bd4efdd..3e1949d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
 [tool.poetry]
 name = "ScreenPro2"
 description = "Flexible analysis of high-content CRISPR screening"
-version = "0.6.0"
+version = "0.6.1"
 authors = [
     "Abolfazl Arab ",
     "Nick Youngblut ",
diff --git a/screenpro/assays/__init__.py b/screenpro/assays/__init__.py
index dcc9988..714c592 100644
--- a/screenpro/assays/__init__.py
+++ b/screenpro/assays/__init__.py
@@ -71,8 +71,8 @@ def _calculateGrowthFactor(self, untreated, treated, db_rate_col):
         growth_factors = []
         # calculate growth factor for gamma, tau, or rho score per replicates
         for replicate in adat.obs.replicate.unique():
-            db_untreated = adat.obs.query(f'condition == "{untreated}" & replicate == {str(replicate)}')[db_rate_col][0]
-            db_treated = adat.obs.query(f'condition == "{treated}" & replicate == {str(replicate)}')[db_rate_col][0]
+            db_untreated = adat.obs.query(f'condition == "{untreated}" & replicate == {str(replicate)}')[db_rate_col].iloc[0]
+            db_treated = adat.obs.query(f'condition == "{treated}" & replicate == {str(replicate)}')[db_rate_col].iloc[0]
 
             growth_factors.append(('gamma', db_untreated, replicate, f'gamma_replicate_{replicate}'))
             growth_factors.append(('tau', db_treated, replicate, f'tau_replicate_{replicate}'))
diff --git a/screenpro/ngs/cas9.py b/screenpro/ngs/cas9.py
index 6f07804..23e0173 100644
--- a/screenpro/ngs/cas9.py
+++ b/screenpro/ngs/cas9.py
@@ -154,10 +154,9 @@ def map_to_library_single_guide(df_count, library, return_type='all', verbose=Fa
     )
 
     if verbose:
-        print("% mapped reads",
-              100 * \
-              res_map.to_pandas()['count'].fillna(0).sum() / \
-              int(res.select(pl.sum("count")).to_pandas()['count'])
+        print(
+            "% mapped reads",
+            100 * res_map['count'].sum() / res["count"].sum()
         )
 
     if return_type == 'unmapped':
@@ -221,8 +220,8 @@ def map_to_library_dual_guide(df_count, library, get_recombinant=False, return_t
     if verbose:
         print("% mapped reads",
               100 * \
-              res_map.to_pandas()['count'].fillna(0).sum() / \
-              int(res.select(pl.sum("count")).to_pandas()['count'])
+              res_map['count'].sum() / \
+              res["count"].sum()
         )
 
     if get_recombinant:
@@ -230,8 +229,8 @@ def map_to_library_dual_guide(df_count, library, get_recombinant=False, return_t
         if verbose:
             print("% unmapped reads",
                   100 * \
-                  res_unmap.to_pandas()['count'].fillna(0).sum() / \
-                  int(res.select(pl.sum("count")).to_pandas()['count'])
+                  res_unmap['count'].sum() / \
+                  res["count"].sum()
             )
 
         sgRNA_table = pd.concat([
@@ -253,8 +252,8 @@ def map_to_library_dual_guide(df_count, library, get_recombinant=False, return_t
         if verbose:
             print("% fully remapped recombination events",
                   100 * \
-                  res_recomb_events.drop_nulls().to_pandas()['count'].fillna(0).sum() / \
-                  int(res.select(pl.sum("count")).to_pandas()['count'])
+                  res_recomb_events.drop_nulls()['count'].sum() / \
+                  res['count'].sum()
             )
 
     if return_type == 'unmapped':