Is modifying a class variable in Python thread-safe?

It's not thread-safe, even on CPython. Try this to see for yourself:

import threading

class Foo(object):
    instance_count = 0

def inc_by(n):
    for i in range(n):
        Foo.instance_count += 1

threads = [threading.Thread(target=inc_by, args=(100000,)) for thread_nr in range(100)]
for thread in threads: thread.start()
for thread in threads: thread.join()

print(Foo.instance_count)  # Expected 10,000,000 if the increment were atomic; I get around 5,000,000

The reason is that while the INPLACE_ADD itself is atomic under the GIL, the attribute is loaded and stored by separate bytecode instructions, so a thread switch can happen between the load and the store (see dis.dis(inc_by)). Use a lock to serialize access to the class variable:

Foo.lock = threading.Lock()

def interlocked_inc(n):
    for i in range(n):
        with Foo.lock:
            Foo.instance_count += 1

threads = [threading.Thread(target=interlocked_inc, args=(100000,)) for thread_nr in range(100)]
for thread in threads: thread.start()
for thread in threads: thread.join()

print(Foo.instance_count)
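
To see why the unlocked version loses updates, disassemble the increment. A minimal sketch using the dis module (the exact opcode names vary across CPython versions, e.g. INPLACE_ADD vs. BINARY_OP, but the separate load and store are always there):

import dis

class Foo(object):
    instance_count = 0

def inc_by(n):
    for i in range(n):
        Foo.instance_count += 1

# The += expands to several instructions: LOAD_ATTR fetches the current
# value, the addition runs, and STORE_ATTR writes the result back.
# A thread switch between the load and the store lets two threads read the
# same old value and both write old value + 1, dropping an increment.
dis.dis(inc_by)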

Following on from luc's answer, here's a simplified decorator that uses a with context manager, plus a little __main__ block to spin up the test. Try it with and without the @synchronized decorator to see the difference.

import concurrent.futures
import functools
import logging
import threading


def synchronized(function):
    lock = threading.Lock()
    @functools.wraps(function)
    def wrapper(self, *args, **kwargs):
        with lock:
            return function(self, *args, **kwargs)
    return wrapper


class Foo:
    counter = 0

    @synchronized
    def increase(self):
        Foo.counter += 1


if __name__ == "__main__":
    foo = Foo()
    print(f"Start value is {foo.counter}")
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        for index in range(200000):
            executor.submit(foo.increase)
    print(f"End value is {foo.counter}")

Without @synchronized
End value is 198124
End value is 196827
End value is 197968

With @synchronized
End value is 200000
End value is 200000
End value is 200000
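
Note that synchronized creates one lock per decorated function, so two different decorated methods never block each other. If several methods need to share a single critical section, pass a shared lock in instead; a minimal sketch of that variant (the synchronized_with helper below is hypothetical, not part of the code above):

import functools
import threading


def synchronized_with(lock):
    # Every function decorated with the same lock object shares one critical section.
    def decorator(function):
        @functools.wraps(function)
        def wrapper(*args, **kwargs):
            with lock:
                return function(*args, **kwargs)
        return wrapper
    return decorator


class Account:
    balance = 0
    _lock = threading.Lock()

    @synchronized_with(_lock)
    def deposit(self):
        Account.balance += 1

    @synchronized_with(_lock)
    def withdraw(self):
        Account.balance -= 1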

No, it is not thread-safe. I faced a similar problem a few days ago and chose to add the lock via a decorator. The benefit is that it keeps the code readable:

import threading

def threadsafe_function(fn):
    """Decorator making sure that the decorated function is thread safe."""
    lock = threading.Lock()
    def new(*args, **kwargs):
        lock.acquire()
        try:
            r = fn(*args, **kwargs)
        finally:
            lock.release()
        return r
    return new

class X:
    var = 0

    @threadsafe_function     
    def inc_var(self):
        X.var += 1    
        return X.var
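
A minimal driver (not part of the original answer) that hammers the decorated method from several threads, assuming the X class above; remove @threadsafe_function to watch the final count fall short:

import threading

def hammer(x, n):
    for _ in range(n):
        x.inc_var()

if __name__ == "__main__":
    x = X()
    threads = [threading.Thread(target=hammer, args=(x, 100000)) for _ in range(10)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    print(X.var)  # 1000000 with the decorator, usually less without it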