Jan-03-2017, 07:38 AM
This code lets you add, delete, or modify the sources as a list of lambda functions, each of which calls one of a few helper functions to fetch and process the received data. Since some of these sources can be rather slow, and I wanted to use many of them in my list, I decided to run the queries in parallel, so the script uses multiprocessing. It requires Python 2 for now, since Python 3 removed urllib2.
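(If you want to try it under Python 3 anyway, here is a minimal, untested sketch of a portable import — this is only a starting point I have not verified against the rest of the script; in Python 3 the same names live in urllib.request and urllib.error.)

try:
    # Python 2: urlopen and URLError live in urllib2
    from urllib2 import URLError, urlopen
except ImportError:
    # Python 3: the equivalent names are in urllib.request / urllib.error
    from urllib.request import urlopen
    from urllib.error import URLError

The full script: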
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
"""
file            getmyip.py
purpose         Get the public IP address of this host
command         python getmyip.py
function        getmyip()
arguments       none
note            this script accesses multiple web sites in parallel.
email           10054452614123394844460370234029112340408691

The intent is that this command works correctly under both Python 2
and Python 3.  Please report failures or code improvements to the
author.
"""
__license__ = """
Copyright (C) 2017, by Phil D. Howard - all other rights reserved

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA, OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE, OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

The author may be contacted by decoding the number
10054452614123394844460370234029112340408691
(try turning the number into binary)
"""
from collections import Counter
from multiprocessing import Pipe, Process
from sys import argv, exc_info, stderr, stdout
from time import sleep
from urllib2 import URLError, urlopen
import os, sys

# how long (in seconds) to wait on any one site before giving up on it
http_timeout = 30

# each entry is a zero-argument lambda, so sites can be added, deleted,
# or modified without touching the code that runs them
site_list = [
    lambda: get_from_json( 'http://127.0.0.1:4096/', key='addr' ),
    lambda: get_from_json( 'https://api.ipify.org?format=json', key='ip' ),
    lambda: get_from_json( 'http://jsonip.com/', key='ip' ),
    lambda: get_from_json( 'http://ipinfo.io/json', key='ip' ),
    lambda: get_from_json( 'https://ifconfig.co/json', key='ip' ),
    lambda: get_from_json( 'http://ip-api.com/json', key='query' ),
    lambda: get_from_json( 'http://linuxhomepage.com:2000/', key='addr' ),
    lambda: get_http( 'http://icanhazip.com/' ),
    lambda: get_http( 'http://ipecho.net/plain' ),
    lambda: get_http( 'http://ipinfo.io/ip' ),
    lambda: get_http( 'http://whatismyip.akamai.com/' ),
    lambda: get_http( 'http://v4.ipv6-test.com/api/myip.php' ),
    lambda: get_split( 'http://checkip.dyndns.org/', None, 5 ),
]

def get_http( url ):
    """Fetch a URL and return the stripped body, or None on failure."""
    try:
        conn = urlopen( url, timeout = http_timeout )
        data = conn.read().strip()
    except URLError:
        data = None
    return data

def get_split( url, s, x ):
    """Fetch a URL, split the body on s, and return field x."""
    data = get_http( url )
    if data is None:
        return None
    return data.split(s)[x].strip()

def get_json( url ):
    """Fetch a URL and return its body decoded as JSON."""
    from json import loads
    data = get_http( url )
    if data is None:
        return None
    return loads( data )

def get_from_json( url, key = 'ipaddress' ):
    """Fetch a JSON document and return the named key from it."""
    result = get_json( url )
    if result is None:
        return None
    return result[ key ].strip()

def get_ip_in_child( site_index, this_pipe ):
    """
    function        get_ip_in_child
    purpose         run in each of many child processes to get the IP address
                    of this host as one IP address server site sees it.
    argument 1      (int) index (starting at 0) of which site to access.
    outputs         IP address
    returns         (int) return status code
    """
    if site_index >= len( site_list ):
        return -1
    # the child only writes, so close the read end of the pipe
    os.close(this_pipe[0])
    addr = site_list[site_index]()
    if addr is None:
        # a failed lookup is reported to the parent as an empty answer
        addr = ''
    # write the whole address, looping in case of a short write
    while len(addr) > 0:
        sub = os.write(this_pipe[1],addr)
        addr = addr[sub:]
    os.close(this_pipe[1])
    return 0

def get_many_ips( number_of_sites ):
    """Query every site in parallel and return the list of answers."""
    pipes = []
    procs = []
    addrs = []
    site_range = range( number_of_sites )
    # start one child process per site, each with its own pipe
    for site_index in site_range:
        this_pipe = os.pipe()
        this_proc = Process( target = get_ip_in_child, args = (site_index, this_pipe) )
        pipes.append( this_pipe )
        procs.append( this_proc )
        this_proc.start()
        # the parent only reads, so close its copy of the write end
        os.close(this_pipe[1])
    # collect each child's answer; an empty string means that site failed
    for site_index in site_range:
        addr = ''
        while True:
            data = os.read(pipes[site_index][0],4096)
            if len(data) < 1:
                break
            addr += data
        addrs.append( addr )
    for site_index in site_range:
        procs[site_index].join( 360 )
    return addrs

def getmyip():
    """Return (address, number of sources that agree, number of sources)."""
    tutaj = Counter()
    addrs = get_many_ips( len( site_list ) )
    for addr in addrs:
        tutaj[addr] += 1
    # the address reported most often wins
    addr = tutaj.most_common(1)[0][0]
    agree = tutaj.most_common(1)[0][1]
    sources = len(addrs)
    return (addr, agree, sources)

def main( args ):
    addr, agree, sources = getmyip()
    if addr is None:
        print( 'failed to get IP address, got:', repr(addr), file=stderr )
        return 1
    if not isinstance( addr, basestring ):    # basestring: this script runs under Python 2
        print( 'failed to get an IP address, got:', repr(addr), file=stderr )
        return 2
    if len( addr ) < 7:    # shortest possible dotted-quad address is 7 characters
        print( 'failed to get valid IP address, got:', repr(addr), file=stderr )
        return 3
    if '.' not in addr:
        print( 'failed to get a valid IP address, got:', repr(addr), file=stderr )
        return 4
    print( '{} of {} sources agree that your IP address is:'.format(repr(agree),repr(sources)), file=stderr )
    stderr.flush()
    print( addr )
    return 0

if __name__ == '__main__':
    try:
        result = main( argv )
        stdout.flush()
    except KeyboardInterrupt:
        result = 141
        print( '' )
    except IOError:
        result = 142
    try:
        exit( int( result ) )
    except ValueError:
        print( str( result ), file=stderr )
        exit( 1 )
    except TypeError:
        if result is None:
            exit( 0 )
        exit( 255 )

# EOF
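If you would rather call it from other code than run it as a command, a minimal usage sketch (assuming the script above is saved as getmyip.py somewhere on your import path) would be:

# untested sketch: import the getmyip() function from the script above,
# assuming it was saved as getmyip.py on the import path
from getmyip import getmyip

addr, agree, sources = getmyip()
print('{} of {} sources returned {}'.format(agree, sources, addr))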