{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Not Filter Demo\n", "\n", "Example how to wrap a filter in a not filter to negate a filter\n", "\n", "## Imports" ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "from pyspark import SparkConf, SparkContext\n", "from mmtfPyspark.io import mmtfReader\n", "from mmtfPyspark.filters import ContainsDnaChain, ContainsLProteinChain, NotFilter\n", "from mmtfPyspark.structureViewer import view_structure" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Configure Spark" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "ename": "ValueError", "evalue": "Cannot run multiple SparkContexts at once; existing SparkContext(app=notFilterExample, master=local[*]) created by __init__ at :2 ", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mconf\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mSparkConf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msetMaster\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"local[*]\"\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0msetAppName\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"notFilterExample\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0msc\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mSparkContext\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconf\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mconf\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", "\u001b[0;32m/srv/spark/python/pyspark/context.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, gateway, jsc, profiler_cls)\u001b[0m\n\u001b[1;32m 113\u001b[0m \"\"\"\n\u001b[1;32m 114\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_callsite\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfirst_spark_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mCallSite\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 115\u001b[0;31m \u001b[0mSparkContext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_ensure_initialized\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgateway\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mgateway\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mconf\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mconf\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 116\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 117\u001b[0m self._do_init(master, appName, sparkHome, pyFiles, environment, batchSize, serializer,\n", "\u001b[0;32m/srv/spark/python/pyspark/context.py\u001b[0m in \u001b[0;36m_ensure_initialized\u001b[0;34m(cls, instance, gateway, conf)\u001b[0m\n\u001b[1;32m 273\u001b[0m \u001b[0;34m\" created by %s at %s:%s \"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 274\u001b[0m % (currentAppName, currentMaster,\n\u001b[0;32m--> 275\u001b[0;31m callsite.function, callsite.file, callsite.linenum))\n\u001b[0m\u001b[1;32m 276\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 277\u001b[0m \u001b[0mSparkContext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_active_spark_context\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0minstance\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;31mValueError\u001b[0m: Cannot run multiple SparkContexts at once; existing SparkContext(app=notFilterExample, master=local[*]) created by __init__ at :2 " ] } ], "source": [ "conf = SparkConf().setMaster(\"local[*]\") \\\n", " .setAppName(\"notFilterExample\")\n", "sc = SparkContext(conf = conf)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Read in MMTF Files" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [], "source": [ "path = \"../../resources/mmtf_reduced_sample/\"\n", "\n", "pdb = mmtfReader.read_sequence_file(path, sc)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Filter by contains L Protein Chain" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [], "source": [ "structures = pdb.filter(ContainsLProteinChain())" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Using Not filter to reverse a filter\n", "\n", "Get entires that does not contain DNA Chains" ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [], "source": [ "structures = structures.filter(NotFilter(ContainsDnaChain()))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Count number of entires" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "PDB entires without DNA chains : 4820\n" ] } ], "source": [ "count = structures.count()\n", "\n", "print(f\"PDB entires without DNA chains : {count}\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Visualize Structures" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "d8ab56847c7d48b3ba6203275700490b", "version_major": 2, "version_minor": 0 }, "text/html": [ "

Failed to display Jupyter Widget of type interactive.

\n", "

\n", " If you're reading this message in the Jupyter Notebook or JupyterLab Notebook, it may mean\n", " that the widgets JavaScript is still loading. If this message persists, it\n", " likely means that the widgets JavaScript library is either not installed or\n", " not enabled. See the Jupyter\n", " Widgets Documentation for setup instructions.\n", "

\n", "

\n", " If you're reading this message in another frontend (for example, a static\n", " rendering on GitHub or NBViewer),\n", " it may mean that your frontend doesn't currently support widgets.\n", "

\n" ], "text/plain": [ "interactive(children=(IntSlider(value=0, description='i', max=4819), Output()), _dom_classes=('widget-interact',))" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/plain": [ ".view3d>" ] }, "execution_count": 11, "metadata": {}, "output_type": "execute_result" } ], "source": [ "view_structure(structures.keys().collect())" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Terminate Spark " ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [], "source": [ "sc.stop()" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.0" } }, "nbformat": 4, "nbformat_minor": 2 }