diff --git a/2015-09-29-mapanet.md b/2015-09-29-mapanet.md
new file mode 100644
index 00000000..0ca8b7a7
--- /dev/null
+++ b/2015-09-29-mapanet.md
@@ -0,0 +1,139 @@
+I have read and understand GitHub's Guide to Filing a DMCA Notice
+
+I have a good faith belief that use of the copyrighted materials described
+above on the infringing web pages is not authorized by the copyright owner,
+its agent, or the law.
+
+I swear, under penalty of perjury, that the information in this
+notification is accurate and that I am the copyright owner, or am
+authorized to act on behalf of the owner, of an exclusive right that is
+allegedly infringed.
+
+[PRIVATE]
+
+*GitHub user info:*
+Name: [PRIVATE]
+User: raihanhbh
+Web: ArcticIntelligence.com
+Joined: Mar 28, 2011
+Repository: arcticintelligence/yell-scrapper (copy of script below)
+URL: https://github.com/arcticintelligence/yell-scrapper/blob/master/mapanet_curl.php
+URL: https://github.com/raihanhbh
+User social pages: [PRIVATE]
+
+*Infringement description:*
+The PHP script was designed to gather information from our mapanet.eu web
+pages by executing the Find.asp page, collecting the results, and storing
+them, presumably in order to use them, publish them in raw format, or sell
+them. In any case, our copyright notice at http://www.mapanet.eu/EN/terms.htm
+states, in short:
+
+All and any texts, data and images contained in the website mapanet.eu, are
+copyrighted and all rights are reserved worldwide. Therefore and according
+to copyright laws, only a private usage is allowed without prejudice of any
+other enforcement, possibly more restrictive, of the above mentioned
+copyright laws.
+
+The script is published openly on GitHub, allowing anybody to use it to
+collect protected information from our site.
+
+*Proof*:
+The offending script below clearly contains the URL of our website:
+$url = 'http://www.mapanet.eu/EN/Postal_Codes/Find.asp';
+and the site's unique request parameters:
+$fields = array('Action' => urlencode('ok'), 'C' => urlencode('CN'),
+'F' => urlencode(''), 'GL' => urlencode(''), 'L' => urlencode(0), ... etc.
+The corresponding lines appear in the script reproduced below.
+
+*Ownership*:
+Our domain is registered with EuroDNS.com, where you can verify ownership
+under my name: [PRIVATE]
+
+We ask GitHub to remove the content at:
+https://github.com/arcticintelligence/yell-scrapper/blob/master/mapanet_curl.php
+and to penalize the user who published it.
+
+<?php
+// Note: $postal_codes is assumed to be an array of postal codes to look up,
+// defined earlier in the original script.
+if (count($postal_codes) > 0) {
+    // Save to file.
+    $file = fopen("parsed-data_" . date('Y_m_d_h_i_s') . ".csv", "w");
+}
+foreach ($postal_codes as $key => $postal_code) {
+    echo ($key + 1) . ">>>>" . mb_convert_encoding($postal_code, 'HTML-ENTITIES', "UTF-8") . "\n";
+    get_post_html($postal_code, $file);
+}
+// Close the file pointer.
+if (count($postal_codes) > 0) {
+    fclose($file);
+}
+
+function get_post_html($postal_code, $file) {
+    $fields_string = '';
+    // Set POST variables.
+    $url = 'http://www.mapanet.eu/EN/Postal_Codes/Find.asp';
+    $fields = array(
+        'Action' => urlencode('ok'),
+        'C' => urlencode('CN'),
+        'F' => urlencode(''),
+        'GL' => urlencode(''),
+        'L' => urlencode(0),
+        'N' => urlencode(3),
+        'PC' => urlencode(''),
+        'T' => urlencode($postal_code),
+        'page' => urlencode('')
+    );
+    // URL-ify the data for the POST.
+    foreach ($fields as $key => $value) {
+        $fields_string .= $key . '=' . $value . '&';
+    }
+    $fields_string = rtrim($fields_string, '&');
+    // Open connection.
+    $ch = curl_init();
+    $timeout = 5;
+    // Set the URL, number of POST vars, POST data.
+    curl_setopt($ch, CURLOPT_URL, $url);
+    curl_setopt($ch, CURLOPT_HEADER, FALSE);
+    curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
+    curl_setopt($ch, CURLOPT_POST, count($fields));
+    curl_setopt($ch, CURLOPT_POSTFIELDS, $fields_string);
+    curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, $timeout);
+    curl_setopt($ch, CURLOPT_USERAGENT, 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.13) Gecko/20080311 Firefox/2.0.0.13');
+    // Execute POST.
+    $html = curl_exec($ch);
+    // Close connection.
+    curl_close($ch);
+    parse_html($html, $postal_code, $file);
+}
+
+function parse_html($html, $postal_code, $file) {
+    # Create a DOM parser object.
+    $dom = new DOMDocument();
+    # Parse the HTML returned by Find.asp.
+    # The @ before the method call suppresses any warnings that
+    # loadHTML might throw because of invalid HTML in the page.
+    @$dom->loadHTML($html);
+    # Iterate over all the <a> tags.
+    foreach ($dom->getElementsByTagName('a') as $link) {
+        echo $href = $link->getAttribute('href') . "\n";
+        echo $value = $link->nodeValue . "\n";
+        if (strpos($href, 'Postal_Codes') > 0 &&
+            strpos($href, 'R2=') > 0 &&
+            strpos($href, 'page=') > 0
+        ) {
+            echo $content = $postal_code . ", " . $value . "\n";
+            // Write the row to the file.
+            fwrite($file, $content);
+        }
+    }
+}
+
+Regards
+
+[PRIVATE]