
Process Documents from Files: Include all subdirectories

roger_rutishaus Member Posts: 8 Contributor II
edited December 2018 in Help

Hi all,

 

In the parameters box of "Process Documents from Files" I can set the "text directories". 

I have a lot of HTML files on my local hard drive, in a folder called "webpage" with many subfolders (~100). That is too many subfolders to add them all separately, and I am missing a checkbox that would let me include all the subfolders. Is there a way I can achieve this? I have created a CSV file with the columns "class name" and "directory". It would be great if I could import this.
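For illustration, the CSV is structured roughly like this (the separator and the paths below are only examples, not my real data):

class name;directory
sports;C:\webpage\sports
news;C:\webpage\news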

 

Cheers, Roger

Best Answer

  • Telcontar120 RapidMiner Certified Analyst, RapidMiner Certified Expert, Member Posts: 1,635 Unicorn
    Solution Accepted

    If you don't want to enter the directories manually in that operator (which you can do, and it sounds like you already understand that), another option would be to use "Loop Files", which does have a "recursive" parameter, and do your document processing inside the loop (a minimal sketch follows below).

    Brian T.
    Lindon Ventures 
    Data Science Consulting from Certified RapidMiner Experts
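    For illustration, here is a minimal sketch of that approach: "Loop Files" with its "recursive" parameter enabled and a "Read Document" operator inside the loop. The directory path and operator versions are placeholders, so treat it as a starting point rather than a tested process.

    <?xml version="1.0" encoding="UTF-8"?><process version="8.1.001">
    <context>
    <input/>
    <output/>
    <macros/>
    </context>
    <operator activated="true" class="process" compatibility="8.1.001" expanded="true" name="Process">
    <process expanded="true">
    <operator activated="true" class="concurrency:loop_files" compatibility="8.1.001" expanded="true" name="Loop Files">
    <parameter key="directory" value="C:\webpage"/>
    <parameter key="recursive" value="true"/>
    <process expanded="true">
    <operator activated="true" class="text:read_document" compatibility="8.1.000" expanded="true" name="Read Document">
    <parameter key="extract_text_only" value="true"/>
    <description align="center" color="transparent" colored="false" width="126">Read each file found by the recursive loop</description>
    </operator>
    <connect from_port="file object" to_op="Read Document" to_port="file"/>
    <connect from_op="Read Document" from_port="output" to_port="output 1"/>
    <portSpacing port="source_file object" spacing="0"/>
    <portSpacing port="sink_output 1" spacing="0"/>
    </process>
    <description align="center" color="transparent" colored="false" width="126">Visit the top folder and, with recursive = true, every subfolder below it</description>
    </operator>
    <connect from_op="Loop Files" from_port="output 1" to_port="result 1"/>
    <portSpacing port="source_input 1" spacing="0"/>
    <portSpacing port="sink_result 1" spacing="0"/>
    </process>
    </operator>
    </process>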

Answers

  • roger_rutishaus Member Posts: 8 Contributor II

    Thank you, Brian. Yes, I know I can enter them manually, but I have around 100 subfolders and that would take me too long.

    I'll give the "Loop Files" a try!

  • JEdward RapidMiner Certified Analyst, RapidMiner Certified Expert, Member Posts: 578 Unicorn

    Here is a hastily cobbled-together example showing how you might use Loop Files with your CSV of directories and class labels.

    This might be useful if your directories are in different locations rather than all nested neatly under one folder.

    It uses macros to populate the required parameters.

     

    <?xml version="1.0" encoding="UTF-8"?><process version="8.1.001">
    <context>
    <input/>
    <output/>
    <macros/>
    </context>
    <operator activated="true" class="process" compatibility="8.1.001" expanded="true" name="Process">
    <process expanded="true">
    <operator activated="true" class="read_csv" compatibility="8.1.001" expanded="true" height="68" name="Read CSV" width="90" x="45" y="85">
    <list key="annotations"/>
    <list key="data_set_meta_data_information"/>
    </operator>
    <operator activated="true" class="loop_examples" compatibility="8.1.001" expanded="true" height="103" name="Loop Examples" width="90" x="179" y="85">
    <process expanded="true">
    <operator activated="true" class="extract_macro" compatibility="8.1.001" expanded="true" height="68" name="Extract Macro" width="90" x="45" y="85">
    <parameter key="macro" value="myClass"/>
    <parameter key="macro_type" value="data_value"/>
    <parameter key="attribute_name" value="Class"/>
    <parameter key="example_index" value="%{example}"/>
    <list key="additional_macros">
    <parameter key="myDirectory" value="Directory"/>
    </list>
    <description align="center" color="transparent" colored="false" width="126">Extract the directory and class label values as macros</description>
    </operator>
    <operator activated="true" class="concurrency:loop_files" compatibility="8.1.001" expanded="true" height="82" name="Loop Files" width="90" x="246" y="85">
    <parameter key="directory" value="%{myDirectory}"/>
    <process expanded="true">
    <operator activated="true" class="text:documents_to_data" compatibility="8.1.000" expanded="true" height="82" name="Documents to Data" width="90" x="179" y="34">
    <parameter key="text_attribute" value="text"/>
    <description align="center" color="transparent" colored="false" width="126">Read the text</description>
    </operator>
    <operator activated="true" class="generate_attributes" compatibility="8.1.001" expanded="true" height="82" name="Generate Attributes" width="90" x="380" y="34">
    <list key="function_descriptions">
    <parameter key="Label" value="%{myClass}"/>
    </list>
    <description align="center" color="transparent" colored="false" width="126">Add the class macro as the label attribute.</description>
    </operator>
    <operator activated="true" class="set_role" compatibility="8.1.001" expanded="true" height="82" name="Set Role" width="90" x="514" y="34">
    <parameter key="attribute_name" value="Label"/>
    <parameter key="target_role" value="label"/>
    <list key="set_additional_roles"/>
    </operator>
    <connect from_port="file object" to_op="Documents to Data" to_port="documents 1"/>
    <connect from_op="Documents to Data" from_port="example set" to_op="Generate Attributes" to_port="example set input"/>
    <connect from_op="Generate Attributes" from_port="example set output" to_op="Set Role" to_port="example set input"/>
    <connect from_op="Set Role" from_port="example set output" to_port="output 1"/>
    <portSpacing port="source_file object" spacing="0"/>
    <portSpacing port="source_input 1" spacing="0"/>
    <portSpacing port="source_input 2" spacing="0"/>
    <portSpacing port="sink_output 1" spacing="0"/>
    <portSpacing port="sink_output 2" spacing="0"/>
    </process>
    <description align="center" color="transparent" colored="false" width="126">Use the directory macro as a parameter in Loop Files</description>
    </operator>
    <connect from_port="example set" to_op="Extract Macro" to_port="example set"/>
    <connect from_op="Extract Macro" from_port="example set" to_op="Loop Files" to_port="input 1"/>
    <connect from_op="Loop Files" from_port="output 1" to_port="output 1"/>
    <portSpacing port="source_example set" spacing="0"/>
    <portSpacing port="sink_example set" spacing="0"/>
    <portSpacing port="sink_output 1" spacing="0"/>
    <portSpacing port="sink_output 2" spacing="0"/>
    </process>
    <description align="center" color="transparent" colored="false" width="126">Loop the CSV</description>
    </operator>
    <operator activated="true" class="append" compatibility="8.1.001" expanded="true" height="82" name="Append" width="90" x="313" y="85"/>
    <operator activated="true" class="text:process_document_from_data" compatibility="8.1.000" expanded="true" height="82" name="Process Documents from Data" width="90" x="447" y="34">
    <list key="specify_weights"/>
    <process expanded="true">
    <operator activated="true" class="text:tokenize" compatibility="8.1.000" expanded="true" height="68" name="Tokenize" width="90" x="112" y="34"/>
    <connect from_port="document" to_op="Tokenize" to_port="document"/>
    <connect from_op="Tokenize" from_port="document" to_port="document 1"/>
    <portSpacing port="source_document" spacing="0"/>
    <portSpacing port="sink_document 1" spacing="0"/>
    <portSpacing port="sink_document 2" spacing="0"/>
    <description align="center" color="yellow" colored="false" height="105" resized="false" width="180" x="80" y="124">Some Preprocessing here</description>
    </process>
    </operator>
    <connect from_op="Read CSV" from_port="output" to_op="Loop Examples" to_port="example set"/>
    <connect from_op="Loop Examples" from_port="output 1" to_op="Append" to_port="example set 1"/>
    <connect from_op="Append" from_port="merged set" to_op="Process Documents from Data" to_port="example set"/>
    <connect from_op="Process Documents from Data" from_port="example set" to_port="result 1"/>
    <portSpacing port="source_input 1" spacing="0"/>
    <portSpacing port="sink_result 1" spacing="0"/>
    <portSpacing port="sink_result 2" spacing="0"/>
    </process>
    </operator>
    </process>
  • roger_rutishaus Member Posts: 8 Contributor II

    Thank you, JEdward! I'll give it a try.

  • roger_rutishaus Member Posts: 8 Contributor II

    Brian, it worked nicely. Great!

    Here's my solution (text and terms):

    <?xml version="1.0" encoding="UTF-8"?><process version="8.1.001">
    <context>
    <input/>
    <output/>
    <macros/>
    </context>
    <operator activated="true" class="process" compatibility="8.1.001" expanded="true" name="Process">
    <parameter key="logverbosity" value="init"/>
    <parameter key="random_seed" value="2001"/>
    <parameter key="send_mail" value="never"/>
    <parameter key="notification_email" value=""/>
    <parameter key="process_duration_for_mail" value="30"/>
    <parameter key="encoding" value="SYSTEM"/>
    <process expanded="true">
    <operator activated="true" class="concurrency:loop_files" compatibility="8.1.001" expanded="true" height="82" name="Loop Files (2)" width="90" x="45" y="34">
    <parameter key="directory" value="D:\_BT\Textanalyse\www.htwchur.ch\digital-science\forschung-und-dienstleistung"/>
    <parameter key="filter_type" value="glob"/>
    <parameter key="recursive" value="true"/>
    <parameter key="enable_macros" value="false"/>
    <parameter key="macro_for_file_name" value="file_name"/>
    <parameter key="macro_for_file_type" value="file_type"/>
    <parameter key="macro_for_folder_name" value="folder_name"/>
    <parameter key="reuse_results" value="false"/>
    <parameter key="enable_parallel_execution" value="true"/>
    <process expanded="true">
    <operator activated="true" class="text:read_document" compatibility="8.1.000" expanded="true" height="68" name="Read Document" width="90" x="112" y="34">
    <parameter key="extract_text_only" value="true"/>
    <parameter key="use_file_extension_as_type" value="true"/>
    <parameter key="content_type" value="txt"/>
    <parameter key="encoding" value="UTF-8"/>
    </operator>
    <connect from_port="file object" to_op="Read Document" to_port="file"/>
    <connect from_op="Read Document" from_port="output" to_port="output 1"/>
    <portSpacing port="source_file object" spacing="0"/>
    <portSpacing port="source_input 1" spacing="0"/>
    <portSpacing port="source_input 2" spacing="0"/>
    <portSpacing port="sink_output 1" spacing="0"/>
    <portSpacing port="sink_output 2" spacing="0"/>
    </process>
    </operator>
    <operator activated="true" class="text:process_documents" compatibility="8.1.000" expanded="true" height="103" name="Process Documents (2)" width="90" x="45" y="238">
    <parameter key="create_word_vector" value="true"/>
    <parameter key="vector_creation" value="TF-IDF"/>
    <parameter key="add_meta_information" value="true"/>
    <parameter key="keep_text" value="true"/>
    <parameter key="prune_method" value="none"/>
    <parameter key="prune_below_percent" value="3.0"/>
    <parameter key="prune_above_percent" value="30.0"/>
    <parameter key="prune_below_rank" value="0.05"/>
    <parameter key="prune_above_rank" value="0.95"/>
    <parameter key="datamanagement" value="double_sparse_array"/>
    <parameter key="data_management" value="auto"/>
    <process expanded="true">
    <operator activated="true" class="text:cut_document" compatibility="8.1.000" expanded="true" height="68" name="Cut Document" width="90" x="246" y="34">
    <parameter key="query_type" value="XPath"/>
    <list key="string_machting_queries"/>
    <parameter key="attribute_type" value="Nominal"/>
    <list key="regular_expression_queries"/>
    <list key="regular_region_queries"/>
    <list key="xpath_queries">
    <parameter key="p-class-bodytext" value="//h:p[@class=&amp;quot;bodytext&quot;]//h:p[text()]"/>
    <parameter key="p-class-bodytext-li" value="//h:div[@class=&amp;quot;conttext&quot;]//h:p[text()]"/>
    </list>
    <list key="namespaces"/>
    <parameter key="ignore_CDATA" value="true"/>
    <parameter key="assume_html" value="true"/>
    <list key="index_queries"/>
    <list key="jsonpath_queries"/>
    <process expanded="true">
    <operator activated="true" class="web:extract_html_text_content" compatibility="7.3.000" expanded="true" height="68" name="Extract Content (2)" width="90" x="112" y="34">
    <parameter key="extract_content" value="true"/>
    <parameter key="minimum_text_block_length" value="6"/>
    <parameter key="override_content_type_information" value="true"/>
    <parameter key="neglegt_span_tags" value="true"/>
    <parameter key="neglect_p_tags" value="true"/>
    <parameter key="neglect_b_tags" value="true"/>
    <parameter key="neglect_i_tags" value="true"/>
    <parameter key="neglect_br_tags" value="true"/>
    <parameter key="ignore_non_html_tags" value="true"/>
    </operator>
    <connect from_port="segment" to_op="Extract Content (2)" to_port="document"/>
    <connect from_op="Extract Content (2)" from_port="document" to_port="document 1"/>
    <portSpacing port="source_segment" spacing="0"/>
    <portSpacing port="sink_document 1" spacing="0"/>
    <portSpacing port="sink_document 2" spacing="0"/>
    </process>
    </operator>
    <operator activated="true" class="text:filter_documents_by_content" compatibility="8.1.000" expanded="true" height="82" name="Filter Documents (by Content)" width="90" x="447" y="34">
    <parameter key="condition" value="contains match"/>
    <parameter key="regular_expression" value="."/>
    <parameter key="case_sensitive" value="false"/>
    <parameter key="invert condition" value="false"/>
    </operator>
    <connect from_port="document" to_op="Cut Document" to_port="document"/>
    <connect from_op="Cut Document" from_port="documents" to_op="Filter Documents (by Content)" to_port="documents 1"/>
    <connect from_op="Filter Documents (by Content)" from_port="documents" to_port="document 1"/>
    <portSpacing port="source_document" spacing="0"/>
    <portSpacing port="sink_document 1" spacing="0"/>
    <portSpacing port="sink_document 2" spacing="0"/>
    </process>
    </operator>
    <operator activated="true" class="multiply" compatibility="8.1.001" expanded="true" height="103" name="Multiply (2)" width="90" x="179" y="238"/>
    <operator activated="true" class="remove_duplicates" compatibility="8.1.001" expanded="true" height="103" name="Remove Duplicates" width="90" x="380" y="340">
    <parameter key="attribute_filter_type" value="single"/>
    <parameter key="attribute" value="text"/>
    <parameter key="attributes" value=""/>
    <parameter key="use_except_expression" value="false"/>
    <parameter key="value_type" value="attribute_value"/>
    <parameter key="use_value_type_exception" value="false"/>
    <parameter key="except_value_type" value="time"/>
    <parameter key="block_type" value="attribute_block"/>
    <parameter key="use_block_type_exception" value="false"/>
    <parameter key="except_block_type" value="value_matrix_row_start"/>
    <parameter key="invert_selection" value="false"/>
    <parameter key="include_special_attributes" value="true"/>
    <parameter key="treat_missing_values_as_duplicates" value="true"/>
    </operator>
    <operator activated="true" class="text:data_to_documents" compatibility="8.1.000" expanded="true" height="68" name="Data to Documents" width="90" x="514" y="340">
    <parameter key="select_attributes_and_weights" value="true"/>
    <list key="specify_weights">
    <parameter key="text" value="1.0"/>
    </list>
    </operator>
    <operator activated="true" class="text:process_documents" compatibility="8.1.000" expanded="true" height="103" name="Process Documents" width="90" x="648" y="340">
    <parameter key="create_word_vector" value="true"/>
    <parameter key="vector_creation" value="TF-IDF"/>
    <parameter key="add_meta_information" value="true"/>
    <parameter key="keep_text" value="false"/>
    <parameter key="prune_method" value="none"/>
    <parameter key="prune_below_percent" value="3.0"/>
    <parameter key="prune_above_percent" value="30.0"/>
    <parameter key="prune_below_rank" value="0.05"/>
    <parameter key="prune_above_rank" value="0.95"/>
    <parameter key="datamanagement" value="double_sparse_array"/>
    <parameter key="data_management" value="auto"/>
    <process expanded="true">
    <operator activated="true" class="text:tokenize" compatibility="8.1.000" expanded="true" height="68" name="Tokenize (2)" width="90" x="45" y="34">
    <parameter key="mode" value="non letters"/>
    <parameter key="characters" value=".:"/>
    <parameter key="language" value="English"/>
    <parameter key="max_token_length" value="3"/>
    </operator>
    <operator activated="true" class="text:filter_stopwords_english" compatibility="8.1.000" expanded="true" height="68" name="Filter Stopwords (2)" width="90" x="179" y="34"/>
    <operator activated="true" class="text:filter_stopwords_french" compatibility="8.1.000" expanded="true" height="68" name="Filter Stopwords (3)" width="90" x="313" y="34"/>
    <operator activated="true" class="text:filter_by_length" compatibility="8.1.000" expanded="true" height="68" name="Filter Tokens (2)" width="90" x="447" y="34">
    <parameter key="min_chars" value="4"/>
    <parameter key="max_chars" value="200"/>
    </operator>
    <operator activated="true" class="text:filter_tokens_by_pos" compatibility="8.1.000" expanded="true" height="68" name="Filter Tokens (by POS Tags)" width="90" x="581" y="34">
    <parameter key="language" value="German"/>
    <parameter key="expression" value="NN"/>
    <parameter key="invert_filter" value="false"/>
    </operator>
    <connect from_port="document" to_op="Tokenize (2)" to_port="document"/>
    <connect from_op="Tokenize (2)" from_port="document" to_op="Filter Stopwords (2)" to_port="document"/>
    <connect from_op="Filter Stopwords (2)" from_port="document" to_op="Filter Stopwords (3)" to_port="document"/>
    <connect from_op="Filter Stopwords (3)" from_port="document" to_op="Filter Tokens (2)" to_port="document"/>
    <connect from_op="Filter Tokens (2)" from_port="document" to_op="Filter Tokens (by POS Tags)" to_port="document"/>
    <connect from_op="Filter Tokens (by POS Tags)" from_port="document" to_port="document 1"/>
    <portSpacing port="source_document" spacing="0"/>
    <portSpacing port="sink_document 1" spacing="0"/>
    <portSpacing port="sink_document 2" spacing="0"/>
    </process>
    </operator>
    <operator activated="true" class="text:wordlist_to_data" compatibility="8.1.000" expanded="true" height="82" name="WordList to Data" width="90" x="782" y="391"/>
    <operator activated="true" class="remove_duplicates" compatibility="8.1.001" expanded="true" height="103" name="Remove Duplicates (2)" width="90" x="380" y="34">
    <parameter key="attribute_filter_type" value="single"/>
    <parameter key="attribute" value="text"/>
    <parameter key="attributes" value=""/>
    <parameter key="use_except_expression" value="false"/>
    <parameter key="value_type" value="attribute_value"/>
    <parameter key="use_value_type_exception" value="false"/>
    <parameter key="except_value_type" value="time"/>
    <parameter key="block_type" value="attribute_block"/>
    <parameter key="use_block_type_exception" value="false"/>
    <parameter key="except_block_type" value="value_matrix_row_start"/>
    <parameter key="invert_selection" value="false"/>
    <parameter key="include_special_attributes" value="true"/>
    <parameter key="treat_missing_values_as_duplicates" value="true"/>
    </operator>
    <connect from_port="input 1" to_op="Loop Files (2)" to_port="input 1"/>
    <connect from_op="Loop Files (2)" from_port="output 1" to_op="Process Documents (2)" to_port="documents 1"/>
    <connect from_op="Process Documents (2)" from_port="example set" to_op="Multiply (2)" to_port="input"/>
    <connect from_op="Multiply (2)" from_port="output 1" to_op="Remove Duplicates (2)" to_port="example set input"/>
    <connect from_op="Multiply (2)" from_port="output 2" to_op="Remove Duplicates" to_port="example set input"/>
    <connect from_op="Remove Duplicates" from_port="example set output" to_op="Data to Documents" to_port="example set"/>
    <connect from_op="Data to Documents" from_port="documents" to_op="Process Documents" to_port="documents 1"/>
    <connect from_op="Process Documents" from_port="example set" to_port="result 2"/>
    <connect from_op="Process Documents" from_port="word list" to_op="WordList to Data" to_port="word list"/>
    <connect from_op="WordList to Data" from_port="example set" to_port="result 3"/>
    <connect from_op="Remove Duplicates (2)" from_port="example set output" to_port="result 1"/>
    <portSpacing port="source_input 1" spacing="0"/>
    <portSpacing port="source_input 2" spacing="0"/>
    <portSpacing port="sink_result 1" spacing="0"/>
    <portSpacing port="sink_result 2" spacing="0"/>
    <portSpacing port="sink_result 3" spacing="0"/>
    <portSpacing port="sink_result 4" spacing="0"/>
    </process>
    </operator>
    </process>

    Here is the terms-only version:

    <?xml version="1.0" encoding="UTF-8"?><process version="8.1.001">
    <context>
    <input/>
    <output/>
    <macros/>
    </context>
    <operator activated="true" class="process" compatibility="8.1.001" expanded="true" name="Process">
    <parameter key="logverbosity" value="init"/>
    <parameter key="random_seed" value="2001"/>
    <parameter key="send_mail" value="never"/>
    <parameter key="notification_email" value=""/>
    <parameter key="process_duration_for_mail" value="30"/>
    <parameter key="encoding" value="UTF-8"/>
    <process expanded="true">
    <operator activated="true" class="concurrency:loop_files" compatibility="8.1.001" expanded="true" height="82" name="Loop Files (2)" width="90" x="112" y="34">
    <parameter key="directory" value="D:\_BT\Textanalyse\www.htwchur.ch\digital-science\forschung-und-dienstleistung\institut-sii\forschungsfelder"/>
    <parameter key="filter_type" value="glob"/>
    <parameter key="recursive" value="true"/>
    <parameter key="enable_macros" value="false"/>
    <parameter key="macro_for_file_name" value="file_name"/>
    <parameter key="macro_for_file_type" value="file_type"/>
    <parameter key="macro_for_folder_name" value="folder_name"/>
    <parameter key="reuse_results" value="false"/>
    <parameter key="enable_parallel_execution" value="true"/>
    <process expanded="true">
    <operator activated="true" class="text:read_document" compatibility="8.1.000" expanded="true" height="68" name="Read Document" width="90" x="112" y="34">
    <parameter key="extract_text_only" value="true"/>
    <parameter key="use_file_extension_as_type" value="true"/>
    <parameter key="content_type" value="txt"/>
    <parameter key="encoding" value="UTF-8"/>
    </operator>
    <connect from_port="file object" to_op="Read Document" to_port="file"/>
    <connect from_op="Read Document" from_port="output" to_port="output 1"/>
    <portSpacing port="source_file object" spacing="0"/>
    <portSpacing port="source_input 1" spacing="0"/>
    <portSpacing port="source_input 2" spacing="0"/>
    <portSpacing port="sink_output 1" spacing="0"/>
    <portSpacing port="sink_output 2" spacing="0"/>
    </process>
    </operator>
    <operator activated="true" class="loop_collection" compatibility="8.1.001" expanded="true" height="82" name="Loop Collection" width="90" x="246" y="34">
    <parameter key="set_iteration_macro" value="false"/>
    <parameter key="macro_name" value="iteration"/>
    <parameter key="macro_start_value" value="1"/>
    <parameter key="unfold" value="true"/>
    <process expanded="true">
    <operator activated="true" class="text:cut_document" compatibility="8.1.000" expanded="true" height="68" name="Cut Document" width="90" x="112" y="34">
    <parameter key="query_type" value="XPath"/>
    <list key="string_machting_queries"/>
    <parameter key="attribute_type" value="Nominal"/>
    <list key="regular_expression_queries"/>
    <list key="regular_region_queries"/>
    <list key="xpath_queries">
    <parameter key="p-class-bodytext" value="//h:p[@class=&amp;quot;bodytext&quot;]//h:p[text()]"/>
    <parameter key="div-class-conttext" value="//h:div[@class=&amp;quot;conttext&quot;]//h:p[text()]"/>
    </list>
    <list key="namespaces"/>
    <parameter key="ignore_CDATA" value="true"/>
    <parameter key="assume_html" value="true"/>
    <list key="index_queries"/>
    <list key="jsonpath_queries"/>
    <process expanded="true">
    <operator activated="false" class="text:transform_cases" compatibility="8.1.000" expanded="true" height="68" name="Transform Cases" width="90" x="112" y="544">
    <parameter key="transform_to" value="lower case"/>
    </operator>
    <operator activated="false" class="text:filter_tokens_by_pos" compatibility="8.1.000" expanded="true" height="68" name="Filter Tokens (by POS Tags)" width="90" x="179" y="238">
    <parameter key="language" value="German"/>
    <parameter key="expression" value="NN"/>
    <parameter key="invert_filter" value="false"/>
    </operator>
    <operator activated="true" class="web:extract_html_text_content" compatibility="7.3.000" expanded="true" height="68" name="Extract Content" width="90" x="45" y="34">
    <parameter key="extract_content" value="true"/>
    <parameter key="minimum_text_block_length" value="4"/>
    <parameter key="override_content_type_information" value="true"/>
    <parameter key="neglegt_span_tags" value="true"/>
    <parameter key="neglect_p_tags" value="true"/>
    <parameter key="neglect_b_tags" value="true"/>
    <parameter key="neglect_i_tags" value="true"/>
    <parameter key="neglect_br_tags" value="true"/>
    <parameter key="ignore_non_html_tags" value="true"/>
    </operator>
    <operator activated="true" class="text:tokenize" compatibility="8.1.000" expanded="true" height="68" name="Tokenize (2)" width="90" x="179" y="34">
    <parameter key="mode" value="non letters"/>
    <parameter key="characters" value=".:"/>
    <parameter key="language" value="English"/>
    <parameter key="max_token_length" value="3"/>
    </operator>
    <operator activated="true" class="text:filter_stopwords_english" compatibility="8.1.000" expanded="true" height="68" name="Filter Stopwords (2)" width="90" x="45" y="136"/>
    <operator activated="true" class="text:filter_stopwords_french" compatibility="8.1.000" expanded="true" height="68" name="Filter Stopwords (3)" width="90" x="179" y="136"/>
    <operator activated="true" class="text:filter_by_length" compatibility="8.1.000" expanded="true" height="68" name="Filter Tokens (2)" width="90" x="45" y="238">
    <parameter key="min_chars" value="4"/>
    <parameter key="max_chars" value="200"/>
    </operator>
    <operator activated="true" class="text:extract_token_number" compatibility="8.1.000" expanded="true" height="68" name="Extract Token Number" width="90" x="313" y="238">
    <parameter key="metadata_key" value="token_number"/>
    <parameter key="condition" value="all"/>
    <parameter key="case_sensitive" value="false"/>
    <parameter key="invert_condition" value="false"/>
    </operator>
    <connect from_port="segment" to_op="Extract Content" to_port="document"/>
    <connect from_op="Extract Content" from_port="document" to_op="Tokenize (2)" to_port="document"/>
    <connect from_op="Tokenize (2)" from_port="document" to_op="Filter Stopwords (2)" to_port="document"/>
    <connect from_op="Filter Stopwords (2)" from_port="document" to_op="Filter Stopwords (3)" to_port="document"/>
    <connect from_op="Filter Stopwords (3)" from_port="document" to_op="Filter Tokens (2)" to_port="document"/>
    <connect from_op="Filter Tokens (2)" from_port="document" to_op="Extract Token Number" to_port="document"/>
    <connect from_op="Extract Token Number" from_port="document" to_port="document 1"/>
    <portSpacing port="source_segment" spacing="0"/>
    <portSpacing port="sink_document 1" spacing="0"/>
    <portSpacing port="sink_document 2" spacing="0"/>
    </process>
    </operator>
    <operator activated="true" class="text:filter_documents_by_content" compatibility="8.1.000" expanded="true" height="82" name="Filter Documents (by Content)" width="90" x="380" y="34">
    <parameter key="condition" value="contains match"/>
    <parameter key="regular_expression" value="."/>
    <parameter key="case_sensitive" value="false"/>
    <parameter key="invert condition" value="false"/>
    </operator>
    <operator activated="true" class="text:process_documents" compatibility="8.1.000" expanded="true" height="103" name="Process Documents" width="90" x="581" y="34">
    <parameter key="create_word_vector" value="true"/>
    <parameter key="vector_creation" value="TF-IDF"/>
    <parameter key="add_meta_information" value="true"/>
    <parameter key="keep_text" value="true"/>
    <parameter key="prune_method" value="none"/>
    <parameter key="prune_below_percent" value="3.0"/>
    <parameter key="prune_above_percent" value="30.0"/>
    <parameter key="prune_below_rank" value="0.05"/>
    <parameter key="prune_above_rank" value="0.95"/>
    <parameter key="datamanagement" value="double_sparse_array"/>
    <parameter key="data_management" value="auto"/>
    <process expanded="true">
    <connect from_port="document" to_port="document 1"/>
    <portSpacing port="source_document" spacing="0"/>
    <portSpacing port="sink_document 1" spacing="0"/>
    <portSpacing port="sink_document 2" spacing="0"/>
    </process>
    </operator>
    <operator activated="true" class="write_excel" compatibility="8.1.001" expanded="true" height="82" name="Write Excel" width="90" x="715" y="34">
    <parameter key="excel_file" value="D:\_BT\Textanalyse\saveTextInTable3.xlsx"/>
    <parameter key="file_format" value="xlsx"/>
    <parameter key="encoding" value="SYSTEM"/>
    <parameter key="sheet_name" value="RapidMiner Data"/>
    <parameter key="date_format" value="yyyy-MM-dd HH:mm:ss"/>
    <parameter key="number_format" value="#.0"/>
    </operator>
    <connect from_port="single" to_op="Cut Document" to_port="document"/>
    <connect from_op="Cut Document" from_port="documents" to_op="Filter Documents (by Content)" to_port="documents 1"/>
    <connect from_op="Filter Documents (by Content)" from_port="documents" to_op="Process Documents" to_port="documents 1"/>
    <connect from_op="Process Documents" from_port="example set" to_op="Write Excel" to_port="input"/>
    <connect from_op="Write Excel" from_port="through" to_port="output 1"/>
    <portSpacing port="source_single" spacing="0"/>
    <portSpacing port="sink_output 1" spacing="0"/>
    <portSpacing port="sink_output 2" spacing="0"/>
    </process>
    </operator>
    <connect from_port="input 1" to_op="Loop Files (2)" to_port="input 1"/>
    <connect from_op="Loop Files (2)" from_port="output 1" to_op="Loop Collection" to_port="collection"/>
    <connect from_op="Loop Collection" from_port="output 1" to_port="result 1"/>
    <portSpacing port="source_input 1" spacing="0"/>
    <portSpacing port="source_input 2" spacing="0"/>
    <portSpacing port="sink_result 1" spacing="0"/>
    <portSpacing port="sink_result 2" spacing="0"/>
    </process>
    </operator>
    </process>

     
