# I got curious, so I did a little test run ...
# use Python3's dictionary comprehension to
# speed up list searches
#
# the time it takes to create the dictionary
# will be regained after about 6 searches
# as the list grows, the break-even point drops toward a single search
# tested with Python 3.1.1 vegaseat
import timeit
# raw test data: one name per line
data = """\
Bill
Brutus
Daphne
Dorky
Al
Kristin
Cloe
Carlos
Pete
Pheobe
Jerry
Jack
Justin
John
Julie
Joe
Moe
Theo
Albert
Alberto
Pauline
Paula
Christopher
Gisela
Lamar
Donna
Demitrius
Frank
Heidi
Margot
Cindy
Doris
Harry
Larry
Dilbert
Mary
Robert
Sophia
Samuel
Candy
Tawny
Terry
Markus
Veronika
Norbert
Zoe
Udo"""
# split the raw text into a list of names, one per line
mylist = data.split('\n')
# build a lookup dictionary that maps every name to zero;
# dict.fromkeys gives the same name:0 pairs as a comprehension would
mylist_d = dict.fromkeys(mylist, 0)
# time a membership test for 'Udo', the final entry of the list
statement = "'Udo' in mylist"
setup = "from __main__ import mylist"
sf = "Code %-20s takes %0.3f micro-seconds/pass"
t = timeit.Timer(statement, setup)
# the default 1,000,000 passes means the total equals microseconds/pass
elapsed = t.timeit()
print( sf % (statement, elapsed))
# repeat the same membership timing, now against the dictionary
statement = "'Udo' in mylist_d"
setup = "from __main__ import mylist_d"
sf = "Code %-20s takes %0.3f micro-seconds/pass"
t = timeit.Timer(statement, setup)
elapsed = t.timeit()
print( sf % (statement, elapsed))
# visual divider before the dictionary-construction timing
print('-'*60)
# finally, measure how long building the name:0 dictionary itself takes
setup = "from __main__ import mylist"
statement = "{name:0 for name in mylist}"
t = timeit.Timer(statement, setup)
elapsed = t.timeit()
sf = "Code %-20s takes %0.3f micro-seconds/pass" …